hexsha
stringlengths 40
40
| size
int64 2
1.01M
| content
stringlengths 2
1.01M
| avg_line_length
float64 1.5
100
| max_line_length
int64 2
1k
| alphanum_fraction
float64 0.25
1
|
---|---|---|---|---|---|
38cf84fc8273e98da38210470f86d7a40b0862e0 | 554 | require_relative 'python'
module LanguageHandler
  # Handler for the "5.x" Python runtime variant. It reuses the generic
  # Python handler's execution pipeline and only customises the header
  # that gets injected ahead of the submitted code.
  class Python5 < Python
    LANG_CNAME = '5.x.py'.freeze

    private

    # Delegates execution of the prepared file to the inherited command runner.
    def execute(file)
      execute_command(file)
    end

    # Prepends a stub that forces the twilio library to load a fake TLS
    # certificate (path taken from the FAKE_CERT_PATH environment variable)
    # before any user code runs.
    def text_with_custom_header(file_content)
      header = <<~PYTHON
        import twilio.rest.resources.base
        import sys
        twilio.rest.resources.base.get_cert_file = lambda: '#{ENV['FAKE_CERT_PATH']}'
        sys.modules['twilio.rest.base.resources'] = twilio.rest.resources.base
      PYTHON
      file_content.prepend(header)
    end
  end
end
| 23.083333 | 82 | 0.658845 |
e988cfcdd212bfd42468be4656722768a164eeb3 | 1,258 | # frozen_string_literal: true
# WARNING ABOUT GENERATED CODE
#
# This file is generated. See the contributing guide for more information:
# https://github.com/aws/aws-sdk-ruby/blob/version-3/CONTRIBUTING.md
#
# WARNING ABOUT GENERATED CODE
# Gemspec for the aws-sdk-acmpca gem (ACM Private Certificate Authority
# client). Pure metadata: the release version is read from the sibling
# VERSION file so releases only need to bump that file.
Gem::Specification.new do |spec|
  spec.name = 'aws-sdk-acmpca'
  spec.version = File.read(File.expand_path('../VERSION', __FILE__)).strip
  spec.summary = 'AWS SDK for Ruby - ACM-PCA'
  spec.description = 'Official AWS Ruby gem for AWS Certificate Manager Private Certificate Authority (ACM-PCA). This gem is part of the AWS SDK for Ruby.'
  spec.author = 'Amazon Web Services'
  spec.homepage = 'https://github.com/aws/aws-sdk-ruby'
  spec.license = 'Apache-2.0'
  spec.email = ['[email protected]']
  spec.require_paths = ['lib']
  # Ship only the runtime library plus licensing/changelog metadata.
  spec.files = Dir['LICENSE.txt', 'CHANGELOG.md', 'VERSION', 'lib/**/*.rb']
  spec.metadata = {
    'source_code_uri' => 'https://github.com/aws/aws-sdk-ruby/tree/master/gems/aws-sdk-acmpca',
    'changelog_uri' => 'https://github.com/aws/aws-sdk-ruby/tree/master/gems/aws-sdk-acmpca/CHANGELOG.md'
  }
  spec.add_dependency('aws-sdk-core', '~> 3', '>= 3.112.0')
  spec.add_dependency('aws-sigv4', '~> 1.1')
end
| 39.3125 | 157 | 0.668521 |
e90811c3a22cc7b926f388dad48ed044784947bc | 1,011 | class Mdbook < Formula
  # Homebrew formula for mdBook, a Rust tool that renders Markdown files
  # into online books.
  desc "Create modern online books from Markdown files"
  homepage "https://rust-lang.github.io/mdBook/"
  url "https://github.com/rust-lang/mdBook/archive/v0.4.4.tar.gz"
  sha256 "eaf01085bd25e2efa07b561148afa5e3da3386e5f2c35b245961dc485562c154"
  license "MPL-2.0"
  head "https://github.com/rust-lang/mdBook.git"

  # Pre-built bottle checksums per platform.
  bottle do
    cellar :any_skip_relocation
    sha256 "2a6d98eb7767042009250b84490dea3582bd7a811dc2374d616ce602c826eafb" => :big_sur
    sha256 "59595db021b9ae3e8a2137f7c9deb740508cf110a3822f49715721d571870b1d" => :catalina
    sha256 "12bd9ff40b7421497209163efb1e802dd3ca08e5b84fe5f5c7446eae0366d291" => :mojave
    sha256 "0671b254178a018b1d5ce8e2d1a880886904ff93caebe3d0867a3a5423e35f6f" => :high_sierra
  end

  depends_on "rust" => :build

  # Standard cargo-based build of the release binary.
  def install
    system "cargo", "install", *std_cargo_args
  end

  test do
    # simulate user input to mdbook init
    system "sh", "-c", "printf \\n\\n | #{bin}/mdbook init"
    system "#{bin}/mdbook", "build"
  end
end
| 34.862069 | 93 | 0.755687 |
08ec0073e65133bc8b48378d30ef2bbfa2b97861 | 356 | # frozen_string_literal: true
FactoryBot.define do
  # Thirty-day activity snapshot for an organization. Random numeric
  # suffixes keep name/vanity_url unique across factory invocations.
  factory :org_thirty_day_activity do
    name { Faker::Company.name + rand(999_999).to_s }
    vanity_url { Faker::Lorem.word + rand(999_999).to_s }
    org_type { 1 }
    project_count { 2 }
    affiliate_count { 20 }
    thirty_day_commit_count { 200 }
    association :organization
  end
end
| 25.428571 | 60 | 0.685393 |
7a81ce3409256c3b8914b129b8a71b4f01fb803f | 27 | module GameUsersHelper
  # Rails view-helper namespace for GameUsers views; no helpers defined yet.
end
| 9 | 22 | 0.888889 |
4afbcbf374a022313d74e57c44b4257bdb53afea | 7,304 | #-- encoding: UTF-8
#-- copyright
# OpenProject is a project management system.
# Copyright (C) 2012-2018 the OpenProject Foundation (OPF)
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License version 3.
#
# OpenProject is a fork of ChiliProject, which is a fork of Redmine. The copyright follows:
# Copyright (C) 2006-2017 Jean-Philippe Lang
# Copyright (C) 2010-2013 the ChiliProject Team
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# See docs/COPYRIGHT.rdoc for more details.
#++
require 'spec_helper'
# Unit tests for the work-package ID filter: availability inside/outside a
# project, the NotImplementedError contract for value listing, sanitation
# and validation of the filter values against visible work packages, and
# the generated SQL condition. Fixes two test-description typos
# ('is fals…' -> 'is false…', '#value_object' -> '#value_objects').
describe Queries::WorkPackages::Filter::IdFilter, type: :model do
  let(:project) { FactoryGirl.build_stubbed(:project) }
  let(:query) do
    FactoryGirl.build_stubbed(:query, project: project)
  end

  it_behaves_like 'basic query filter' do
    let(:class_key) { :id }
    let(:type) { :list }

    before do
      instance.context = query
    end

    describe '#available?' do
      context 'within a project' do
        it 'is true if any work package exists and is visible' do
          allow(WorkPackage)
            .to receive_message_chain(:visible, :for_projects, :exists?)
            .with(no_args)
            .with(project)
            .with(no_args)
            .and_return true

          expect(instance).to be_available
        end

        it 'is false if no work package exists/ is visible' do
          allow(WorkPackage)
            .to receive_message_chain(:visible, :for_projects, :exists?)
            .with(no_args)
            .with(project)
            .with(no_args)
            .and_return false

          expect(instance).not_to be_available
        end
      end

      context 'outside of a project' do
        let(:project) { nil }

        it 'is true if any work package exists and is visible' do
          allow(WorkPackage)
            .to receive_message_chain(:visible, :exists?)
            .with(no_args)
            .and_return true

          expect(instance).to be_available
        end

        it 'is false if no work package exists/ is visible' do
          allow(WorkPackage)
            .to receive_message_chain(:visible, :exists?)
            .with(no_args)
            .and_return false

          expect(instance).not_to be_available
        end
      end
    end

    describe '#ar_object_filter?' do
      it 'is true' do
        expect(instance).to be_ar_object_filter
      end
    end

    # Listing every work package id is intentionally unsupported.
    describe '#allowed_values' do
      it 'raises an error' do
        expect { instance.allowed_values }.to raise_error NotImplementedError
      end
    end

    describe '#value_objects' do
      it 'raises an error' do
        expect { instance.value_objects }.to raise_error NotImplementedError
      end
    end

    describe '#allowed_objects' do
      it 'raises an error' do
        expect { instance.allowed_objects }.to raise_error NotImplementedError
      end
    end

    describe '#valid_values!' do
      let(:visible_wp) { FactoryGirl.build_stubbed(:work_package) }
      let(:invisible_wp) { FactoryGirl.build_stubbed(:work_package) }

      context 'within a project' do
        it 'removes all non existing/non visible ids' do
          instance.values = [visible_wp.id.to_s, invisible_wp.id.to_s, '999999']

          allow(WorkPackage)
            .to receive_message_chain(:visible, :for_projects, :where, :pluck)
            .with(no_args)
            .with(project)
            .with(id: instance.values)
            .with(:id)
            .and_return([visible_wp.id])

          instance.valid_values!

          expect(instance.values)
            .to match_array [visible_wp.id.to_s]
        end
      end

      context 'outside of a project' do
        let(:project) { nil }

        it 'removes all non existing/non visible ids' do
          instance.values = [visible_wp.id.to_s, invisible_wp.id.to_s, '999999']

          allow(WorkPackage)
            .to receive_message_chain(:visible, :where, :pluck)
            .with(no_args)
            .with(id: instance.values)
            .with(:id)
            .and_return([visible_wp.id])

          instance.valid_values!

          expect(instance.values)
            .to match_array [visible_wp.id.to_s]
        end
      end
    end

    describe '#validate' do
      let(:visible_wp) { FactoryGirl.build_stubbed(:work_package) }
      let(:invisible_wp) { FactoryGirl.build_stubbed(:work_package) }

      context 'within a project' do
        it 'is valid if only visible wps are values' do
          instance.values = [visible_wp.id.to_s]

          allow(WorkPackage)
            .to receive_message_chain(:visible, :for_projects, :where, :pluck)
            .with(no_args)
            .with(project)
            .with(id: instance.values)
            .with(:id)
            .and_return([visible_wp.id])

          expect(instance).to be_valid
        end

        it 'is invalid if invisible wps are values' do
          instance.values = [invisible_wp.id.to_s, visible_wp.id.to_s]

          allow(WorkPackage)
            .to receive_message_chain(:visible, :for_projects, :where, :pluck)
            .with(no_args)
            .with(project)
            .with(id: instance.values)
            .with(:id)
            .and_return([visible_wp.id])

          expect(instance).not_to be_valid
        end
      end

      context 'outside of a project' do
        let(:project) { nil }

        it 'is valid if only visible wps are values' do
          instance.values = [visible_wp.id.to_s]

          allow(WorkPackage)
            .to receive_message_chain(:visible, :where, :pluck)
            .with(no_args)
            .with(id: instance.values)
            .with(:id)
            .and_return([visible_wp.id])

          expect(instance).to be_valid
        end

        it 'is invalid if invisible wps are values' do
          instance.values = [invisible_wp.id.to_s, visible_wp.id.to_s]

          allow(WorkPackage)
            .to receive_message_chain(:visible, :where, :pluck)
            .with(no_args)
            .with(id: instance.values)
            .with(:id)
            .and_return([visible_wp.id])

          expect(instance).not_to be_valid
        end
      end
    end

    # Uses real records: only the work package named in the values may match.
    describe '#where' do
      let(:visible_wp) { FactoryGirl.create(:work_package) }
      let(:other_wp) { FactoryGirl.create(:work_package) }

      before do
        visible_wp
        other_wp

        instance.values = [visible_wp.id.to_s]
        instance.operator = '='
      end

      it 'filters' do
        expect(WorkPackage.where(instance.where))
          .to match_array [visible_wp]
      end
    end
  end
end
| 29.691057 | 91 | 0.614732 |
9127ab3e3eca32a4b3dbf6faedb31e90741f84dc | 2,061 | # Copyright (c) 2013-2014 Irrational Industries Inc. d.b.a. Nitrous.IO
# This software is licensed under the [BSD 2-Clause license](https://raw.github.com/nitrous-io/autoparts/master/LICENSE).
require 'pathname'
# Base paths for the Autoparts installation: everything lives under
# ~/.parts, with the application checkout in ~/.parts/autoparts.
HOME_PATH = Pathname.new(Dir.home)
AUTOPARTS_ROOT_PATH = HOME_PATH + '.parts'
AUTOPARTS_APP_PATH = AUTOPARTS_ROOT_PATH + 'autoparts'
# Raised when a shell command exits with a non-zero status; the message
# embeds the offending command in double quotes.
class ExecutionFailedError < StandardError
  def initialize(cmd)
    super(%("#{cmd}" failed))
  end
end
# Runs a command via Kernel#system, stringifying every argument first, and
# raises ExecutionFailedError when the command fails or cannot be spawned.
def execute(*args)
  strings = args.map(&:to_s)
  return if system(*strings)

  raise ExecutionFailedError.new(strings.join(' '))
end
# Appends the Autoparts PATH export and shell init hook to the given shell
# rc file. Each line is only appended when the file does not already
# contain it, so re-running the installer does not duplicate entries.
def inject_parts_init(path)
  puts "=> Injecting init script into ~/#{path.basename}"
  bin_dir = AUTOPARTS_APP_PATH.relative_path_from(HOME_PATH) + 'bin'
  existing = File.read(path)
  export_line = "export PATH=\"$HOME/#{bin_dir}:$PATH\"\n"
  init_line = "eval \"$(parts env)\"\n"
  File.open(path, 'a') do |rc|
    rc.write "\n"
    rc.write export_line unless existing.include?(export_line)
    rc.write init_line unless existing.include?(init_line)
  end
end
# Refuse to reinstall over a non-empty existing checkout.
if AUTOPARTS_APP_PATH.exist? && AUTOPARTS_APP_PATH.children.any?
  abort "setup: It appears that Autoparts is already installed on your box. If you want to reinstall Autoparts, please make sure that your \"#{AUTOPARTS_APP_PATH}\" directory is empty."
end

begin
  AUTOPARTS_ROOT_PATH.mkpath

  puts "=> Downloading Autoparts..."
  execute 'git', 'clone', 'https://github.com/action-io/autoparts.git', AUTOPARTS_APP_PATH

  bash_profile_path = HOME_PATH + '.bash_profile'
  bashrc_path = HOME_PATH + '.bashrc'
  zshrc_path = HOME_PATH + '.zshrc'

  # Patch .bash_profile if present, otherwise .bashrc; zsh is handled
  # independently of the bash files.
  if bash_profile_path.exist?
    inject_parts_init(bash_profile_path)
  elsif bashrc_path.exist?
    inject_parts_init(bashrc_path)
  end
  if zshrc_path.exist?
    inject_parts_init(zshrc_path)
  end

  puts "=> Installation complete!"
  puts "\nPlease reopen this shell or enter the following command:\n  exec $SHELL -l"
rescue => e
  # Roll back a partial checkout so the next run starts clean.
  AUTOPARTS_APP_PATH.rmtree if AUTOPARTS_APP_PATH.exist?
  abort "setup: ERROR: #{e}\nAborting!"
end
| 31.707692 | 185 | 0.737021 |
edc044795f71e7f769607426a1c97ca473809bbc | 141 | # Require every sibling *.rb file except this loader itself and *_test.rb files.
this_file = File.expand_path(__FILE__)
Dir["#{File.dirname(this_file)}/*.rb"].each do |candidate|
  next if candidate == this_file || candidate =~ /_test.rb$/
  require candidate
end
| 47 | 75 | 0.617021 |
5d585c7fac988050d6dcaaefd37cd061642d24f6 | 953 | # coding: utf-8
# vim: et ts=2 sw=2
# Spec for the 'partial-operation' NETCONF RPC error class: verifies its
# registration in the server error dictionary/list, the RpcErrorable
# mixin, and the TYPE/SEVERITY/INFO constants.
RSpec.describe HrrRbNetconf::Server::Error::PartialOperation do
  # Expected attribute values for the error class under test.
  let(:tag){ 'partial-operation' }
  let(:type){ ['application'] }
  let(:severity){ ['error'] }
  let(:info){ ['ok-element', 'err-element', 'noop-element'] }

  it "can be looked up in HrrRbNetconf::Server::Error dictionary" do
    expect( HrrRbNetconf::Server::Error[tag] ).to eq described_class
  end

  it "is registered in HrrRbNetconf::Server::Error.list" do
    expect( HrrRbNetconf::Server::Error.list ).to include tag
  end

  it "includes HrrRbNetconf::Server::Error::RpcErrorable" do
    expect( described_class.include? HrrRbNetconf::Server::Error::RpcErrorable ).to be true
  end

  it "has correct TYPE" do
    expect( described_class::TYPE ).to eq type
  end

  it "has correct SEVERITY" do
    expect( described_class::SEVERITY ).to eq severity
  end

  it "has correct INFO" do
    expect( described_class::INFO ).to eq info
  end
end
| 28.029412 | 91 | 0.697796 |
0181b20b42b1109af5306f3d12cac69f543884a3 | 709 | class RailsBlogController < ApplicationController
# USE THIS METHODS WHEN YOU NEED TO SKIP AUTH FILTERS
#
# skip_before_filter :authenticate_user!, only: [:method_name]
# skip_before_filter :blog_admin_required!, only: [:method_name]
before_action :authenticate_user!, except: %w[ index show ]
before_action :blog_admin_required!, except: %w[ index show ]
before_action :owner_required, except: %w[ index show ]
layout ->{ layout_for_action }
private
def blog_admin_required!
redirect_to root_path unless current_user.try(:admin?)
end
def layout_for_action
return 'rails_blog_frontend' if %w[ index show ].include?(action_name)
'rails_blog_backend'
end
end | 30.826087 | 74 | 0.74189 |
b91ab592691f04e70270e46b22cc87ff3df2b019 | 19,481 | class Fdroidserver < Formula
include Language::Python::Virtualenv
desc "Create and manage Android app repositories for F-Droid"
homepage "https://f-droid.org"
url "https://files.pythonhosted.org/packages/63/a9/327d10e876b7c5750564035151bae9f65268e3b6aa6163cd519f43b48bf3/fdroidserver-1.1.9.tar.gz"
sha256 "420904baf24c8a5b8485898f3a3d1f6c2c52b4599ef4df443ebc901fd2710754"
bottle do
cellar :any
sha256 "b6c78f576d5efc31e4094e61c13fb2ad8d374aa76a458e1bd309040f197c0526" => :catalina
sha256 "b662c1a4d1eeba40e43243e91b499605ab1e1758321beab10f9a3848e50c9d40" => :mojave
sha256 "7202bafc628326f0b4951ec1d13c3803dc3385abc2b9683fcf45db4248b87f9b" => :high_sierra
sha256 "82fa826d3ec20fabc5fa51844561e78a342dca29f5c8772e2a991e5f74904981" => :x86_64_linux
end
depends_on "pkg-config" => :build
depends_on "freetype"
depends_on "jpeg"
depends_on "libtiff"
depends_on "[email protected]"
depends_on "[email protected]"
depends_on "s3cmd"
depends_on "webp"
uses_from_macos "libxml2"
uses_from_macos "libxslt"
uses_from_macos "zlib"
resource "androguard" do
url "https://files.pythonhosted.org/packages/83/78/0f44e8f0fd10493b3118d79d60599c93e5a2cd378d83054014600a620cba/androguard-3.3.5.tar.gz"
sha256 "f0655ca3a5add74c550951e79bd0bebbd1c5b239178393d30d8db0bd3202cda2"
end
resource "apache-libcloud" do
url "https://files.pythonhosted.org/packages/ae/dd/a505a95ef51f097125cbd277280601e92c26a9ad18986b2846d306f73aca/apache-libcloud-3.1.0.tar.gz"
sha256 "bbb858e045ba5e06d61632c08b9828625766aae30ff4fe727f2bee598c9048ac"
end
if OS.mac?
resource "appnope" do
url "https://files.pythonhosted.org/packages/26/34/0f3a5efac31f27fabce64645f8c609de9d925fe2915304d1a40f544cff0e/appnope-0.1.0.tar.gz"
sha256 "8b995ffe925347a2138d7ac0fe77155e4311a0ea6d6da4f5128fe4b3cbe5ed71"
end
end
resource "args" do
url "https://files.pythonhosted.org/packages/e5/1c/b701b3f4bd8d3667df8342f311b3efaeab86078a840fb826bd204118cc6b/args-0.1.0.tar.gz"
sha256 "a785b8d837625e9b61c39108532d95b85274acd679693b71ebb5156848fcf814"
end
resource "asn1crypto" do
url "https://files.pythonhosted.org/packages/9f/3d/8beae739ed8c1c8f00ceac0ab6b0e97299b42da869e24cf82851b27a9123/asn1crypto-1.3.0.tar.gz"
sha256 "5a215cb8dc12f892244e3a113fe05397ee23c5c4ca7a69cd6e69811755efc42d"
end
resource "backcall" do
url "https://files.pythonhosted.org/packages/a2/40/764a663805d84deee23043e1426a9175567db89c8b3287b5c2ad9f71aa93/backcall-0.2.0.tar.gz"
sha256 "5cbdbf27be5e7cfadb448baf0aa95508f91f2bbc6c6437cd9cd06e2a4c215e1e"
end
resource "bcrypt" do
url "https://files.pythonhosted.org/packages/fa/aa/025a3ab62469b5167bc397837c9ffc486c42a97ef12ceaa6699d8f5a5416/bcrypt-3.1.7.tar.gz"
sha256 "0b0069c752ec14172c5f78208f1863d7ad6755a6fae6fe76ec2c80d13be41e42"
end
resource "certifi" do
url "https://files.pythonhosted.org/packages/40/a7/ded59fa294b85ca206082306bba75469a38ea1c7d44ea7e1d64f5443d67a/certifi-2020.6.20.tar.gz"
sha256 "5930595817496dd21bb8dc35dad090f1c2cd0adfaf21204bf6732ca5d8ee34d3"
end
resource "cffi" do
url "https://files.pythonhosted.org/packages/05/54/3324b0c46340c31b909fcec598696aaec7ddc8c18a63f2db352562d3354c/cffi-1.14.0.tar.gz"
sha256 "2d384f4a127a15ba701207f7639d94106693b6cd64173d6c8988e2c25f3ac2b6"
end
resource "chardet" do
url "https://files.pythonhosted.org/packages/fc/bb/a5768c230f9ddb03acc9ef3f0d4a3cf93462473795d18e9535498c8f929d/chardet-3.0.4.tar.gz"
sha256 "84ab92ed1c4d4f16916e05906b6b75a6c0fb5db821cc65e70cbd64a3e2a5eaae"
end
resource "click" do
url "https://files.pythonhosted.org/packages/27/6f/be940c8b1f1d69daceeb0032fee6c34d7bd70e3e649ccac0951500b4720e/click-7.1.2.tar.gz"
sha256 "d2b5255c7c6349bc1bd1e59e08cd12acbbd63ce649f2588755783aa94dfb6b1a"
end
resource "clint" do
url "https://files.pythonhosted.org/packages/3d/b4/41ecb1516f1ba728f39ee7062b9dac1352d39823f513bb6f9e8aeb86e26d/clint-0.5.1.tar.gz"
sha256 "05224c32b1075563d0b16d0015faaf9da43aa214e4a2140e51f08789e7a4c5aa"
end
resource "colorama" do
url "https://files.pythonhosted.org/packages/82/75/f2a4c0c94c85e2693c229142eb448840fba0f9230111faa889d1f541d12d/colorama-0.4.3.tar.gz"
sha256 "e96da0d330793e2cb9485e9ddfd918d456036c7149416295932478192f4436a1"
end
resource "cryptography" do
url "https://files.pythonhosted.org/packages/56/3b/78c6816918fdf2405d62c98e48589112669f36711e50158a0c15d804c30d/cryptography-2.9.2.tar.gz"
sha256 "a0c30272fb4ddda5f5ffc1089d7405b7a71b0b0f51993cb4e5dbb4590b2fc229"
end
resource "Cycler" do
url "https://files.pythonhosted.org/packages/c2/4b/137dea450d6e1e3d474e1d873cd1d4f7d3beed7e0dc973b06e8e10d32488/cycler-0.10.0.tar.gz"
sha256 "cd7b2d1018258d7247a71425e9f26463dfb444d411c39569972f4ce586b0c9d8"
end
resource "decorator" do
url "https://files.pythonhosted.org/packages/da/93/84fa12f2dc341f8cf5f022ee09e109961055749df2d0c75c5f98746cfe6c/decorator-4.4.2.tar.gz"
sha256 "e3a62f0520172440ca0dcc823749319382e377f37f140a0b99ef45fecb84bfe7"
end
resource "defusedxml" do
url "https://files.pythonhosted.org/packages/a4/5f/f8aa58ca0cf01cbcee728abc9d88bfeb74e95e6cb4334cfd5bed5673ea77/defusedxml-0.6.0.tar.gz"
sha256 "f684034d135af4c6cbb949b8a4d2ed61634515257a67299e5f940fbaa34377f5"
end
resource "docker-py" do
url "https://files.pythonhosted.org/packages/fa/2d/906afc44a833901fc6fed1a89c228e5c88fbfc6bd2f3d2f0497fdfb9c525/docker-py-1.10.6.tar.gz"
sha256 "4c2a75875764d38d67f87bc7d03f7443a3895704efc57962bdf6500b8d4bc415"
end
resource "docker-pycreds" do
url "https://files.pythonhosted.org/packages/c5/e6/d1f6c00b7221e2d7c4b470132c931325c8b22c51ca62417e300f5ce16009/docker-pycreds-0.4.0.tar.gz"
sha256 "6ce3270bcaf404cc4c3e27e4b6c70d3521deae82fb508767870fdbf772d584d4"
end
resource "gitdb" do
url "https://files.pythonhosted.org/packages/d1/05/eaf2ac564344030d8b3ce870b116d7bb559020163e80d9aa4a3d75f3e820/gitdb-4.0.5.tar.gz"
sha256 "c9e1f2d0db7ddb9a704c2a0217be31214e91a4fe1dea1efad19ae42ba0c285c9"
end
resource "GitPython" do
url "https://files.pythonhosted.org/packages/5b/ef/96dd6b06400821bbad3f7e275f4a4f88af324124c5c04958e2f2c14ce2c8/GitPython-3.1.3.tar.gz"
sha256 "e107af4d873daed64648b4f4beb89f89f0cfbe3ef558fc7821ed2331c2f8da1a"
end
resource "idna" do
url "https://files.pythonhosted.org/packages/cb/19/57503b5de719ee45e83472f339f617b0c01ad75cba44aba1e4c97c2b0abd/idna-2.9.tar.gz"
sha256 "7588d1c14ae4c77d74036e8c22ff447b26d0fde8f007354fd48a7814db15b7cb"
end
resource "ipython" do
url "https://files.pythonhosted.org/packages/22/2b/49a79bcd466b50aa0ea12411fa752e74984935363bb93ed5931fd0449309/ipython-7.15.0.tar.gz"
sha256 "0ef1433879816a960cd3ae1ae1dc82c64732ca75cec8dab5a4e29783fb571d0e"
end
resource "ipython_genutils" do
url "https://files.pythonhosted.org/packages/e8/69/fbeffffc05236398ebfcfb512b6d2511c622871dca1746361006da310399/ipython_genutils-0.2.0.tar.gz"
sha256 "eb2e116e75ecef9d4d228fdc66af54269afa26ab4463042e33785b887c628ba8"
end
resource "jedi" do
url "https://files.pythonhosted.org/packages/2e/86/3ea824e61de521b2abd9ada9a080375c01721e66266ccc8ba8b3576ad88a/jedi-0.17.1.tar.gz"
sha256 "807d5d4f96711a2bcfdd5dfa3b1ae6d09aa53832b182090b222b5efb81f52f63"
end
resource "kiwisolver" do
url "https://files.pythonhosted.org/packages/62/b8/db619d97819afb52a3ff5ff6ad3f7de408cc83a8ec2dfb31a1731c0a97c2/kiwisolver-1.2.0.tar.gz"
sha256 "247800260cd38160c362d211dcaf4ed0f7816afb5efe56544748b21d6ad6d17f"
end
resource "lxml" do
url "https://files.pythonhosted.org/packages/03/a8/73d795778143be51d8b86750b371b3efcd7139987f71618ad9f4b8b65543/lxml-4.5.1.tar.gz"
sha256 "27ee0faf8077c7c1a589573b1450743011117f1aa1a91d5ae776bbc5ca6070f2"
end
resource "matplotlib" do
url "https://files.pythonhosted.org/packages/9c/4b/06f4aa9bef6b5e4f177881b4dedd94faa6e7cb3d95dfaeaa8a1a8b541095/matplotlib-3.2.2.tar.gz"
sha256 "3d77a6630d093d74cbbfebaa0571d00790966be1ed204e4a8239f5cbd6835c5d"
end
resource "mwclient" do
url "https://files.pythonhosted.org/packages/97/b4/5fc70ad3286a8d8ec4b9ac01acad0f6b00c5a48d4a16b9d3be6519b7eb21/mwclient-0.10.1.tar.gz"
sha256 "79363dd8d12f5e3b91b92b63152bf9dfef27da786c076a244e1f148c8dd67139"
end
resource "networkx" do
url "https://files.pythonhosted.org/packages/bf/63/7b579dd3b1c49ce6b7fd8f6f864038f255201410905dd183cf7f4a3845cf/networkx-2.4.tar.gz"
sha256 "f8f4ff0b6f96e4f9b16af6b84622597b5334bf9cae8cf9b2e42e7985d5c95c64"
end
resource "numpy" do
url "https://files.pythonhosted.org/packages/f1/2c/717bdd12404c73ec0c8c734c81a0bad7048866bc36a88a1b69fd52b01c07/numpy-1.19.0.zip"
sha256 "76766cc80d6128750075378d3bb7812cf146415bd29b588616f72c943c00d598"
end
resource "oauthlib" do
url "https://files.pythonhosted.org/packages/fc/c7/829c73c64d3749da7811c06319458e47f3461944da9d98bb4df1cb1598c2/oauthlib-3.1.0.tar.gz"
sha256 "bee41cc35fcca6e988463cacc3bcb8a96224f470ca547e697b604cc697b2f889"
end
resource "paramiko" do
url "https://files.pythonhosted.org/packages/ac/15/4351003352e11300b9f44a13576bff52dcdc6e4a911129c07447bda0a358/paramiko-2.7.1.tar.gz"
sha256 "920492895db8013f6cc0179293147f830b8c7b21fdfc839b6bad760c27459d9f"
end
resource "parso" do
url "https://files.pythonhosted.org/packages/fe/24/c30eb4be8a24b965cfd6e2e6b41180131789b44042112a16f9eb10c80f6e/parso-0.7.0.tar.gz"
sha256 "908e9fae2144a076d72ae4e25539143d40b8e3eafbaeae03c1bfe226f4cdf12c"
end
resource "pexpect" do
url "https://files.pythonhosted.org/packages/e5/9b/ff402e0e930e70467a7178abb7c128709a30dfb22d8777c043e501bc1b10/pexpect-4.8.0.tar.gz"
sha256 "fc65a43959d153d0114afe13997d439c22823a27cefceb5ff35c2178c6784c0c"
end
resource "pickleshare" do
url "https://files.pythonhosted.org/packages/d8/b6/df3c1c9b616e9c0edbc4fbab6ddd09df9535849c64ba51fcb6531c32d4d8/pickleshare-0.7.5.tar.gz"
sha256 "87683d47965c1da65cdacaf31c8441d12b8044cdec9aca500cd78fc2c683afca"
end
resource "Pillow" do
url "https://files.pythonhosted.org/packages/ce/ef/e793f6ffe245c960c42492d0bb50f8d14e2ba223f1922a5c3c81569cec44/Pillow-7.1.2.tar.gz"
sha256 "a0b49960110bc6ff5fead46013bcb8825d101026d466f3a4de3476defe0fb0dd"
end
resource "prompt-toolkit" do
url "https://files.pythonhosted.org/packages/69/19/3aa4bf17e1cbbdfe934eb3d5b394ae9a0a7fb23594a2ff27e0fdaf8b4c59/prompt_toolkit-3.0.5.tar.gz"
sha256 "563d1a4140b63ff9dd587bda9557cffb2fe73650205ab6f4383092fb882e7dc8"
end
resource "ptyprocess" do
url "https://files.pythonhosted.org/packages/7d/2d/e4b8733cf79b7309d84c9081a4ab558c89d8c89da5961bf4ddb050ca1ce0/ptyprocess-0.6.0.tar.gz"
sha256 "923f299cc5ad920c68f2bc0bc98b75b9f838b93b599941a6b63ddbc2476394c0"
end
resource "pyasn1" do
url "https://files.pythonhosted.org/packages/a4/db/fffec68299e6d7bad3d504147f9094830b704527a7fc098b721d38cc7fa7/pyasn1-0.4.8.tar.gz"
sha256 "aef77c9fb94a3ac588e87841208bdec464471d9871bd5050a287cc9a475cd0ba"
end
resource "pyasn1-modules" do
url "https://files.pythonhosted.org/packages/ab/76/36ab0e099e6bd27ed95b70c2c86c326d3affa59b9b535c63a2f892ac9f45/pyasn1-modules-0.2.1.tar.gz"
sha256 "af00ea8f2022b6287dc375b2c70f31ab5af83989fc6fe9eacd4976ce26cd7ccc"
end
resource "pycparser" do
url "https://files.pythonhosted.org/packages/0f/86/e19659527668d70be91d0369aeaa055b4eb396b0f387a4f92293a20035bd/pycparser-2.20.tar.gz"
sha256 "2d475327684562c3a96cc71adf7dc8c4f0565175cf86b6d7a404ff4c771f15f0"
end
resource "pydot" do
url "https://files.pythonhosted.org/packages/5f/e2/23e053ccf5648153959ea15d77fb90adb2b1f9c9360f832f39d6d6c024e2/pydot-1.4.1.tar.gz"
sha256 "d49c9d4dd1913beec2a997f831543c8cbd53e535b1a739e921642fe416235f01"
end
resource "Pygments" do
url "https://files.pythonhosted.org/packages/6e/4d/4d2fe93a35dfba417311a4ff627489a947b01dc0cc377a3673c00cf7e4b2/Pygments-2.6.1.tar.gz"
sha256 "647344a061c249a3b74e230c739f434d7ea4d8b1d5f3721bc0f3558049b38f44"
end
resource "PyNaCl" do
url "https://files.pythonhosted.org/packages/cf/5a/25aeb636baeceab15c8e57e66b8aa930c011ec1c035f284170cacb05025e/PyNaCl-1.4.0.tar.gz"
sha256 "54e9a2c849c742006516ad56a88f5c74bf2ce92c9f67435187c3c5953b346505"
end
resource "pyparsing" do
url "https://files.pythonhosted.org/packages/c1/47/dfc9c342c9842bbe0036c7f763d2d6686bcf5eb1808ba3e170afdb282210/pyparsing-2.4.7.tar.gz"
sha256 "c203ec8783bf771a155b207279b9bccb8dea02d8f0c9e5f8ead507bc3246ecc1"
end
resource "python-dateutil" do
url "https://files.pythonhosted.org/packages/be/ed/5bbc91f03fa4c839c4c7360375da77f9659af5f7086b7a7bdda65771c8e0/python-dateutil-2.8.1.tar.gz"
sha256 "73ebfe9dbf22e832286dafa60473e4cd239f8592f699aa5adaf10050e6e1823c"
end
resource "python-vagrant" do
url "https://files.pythonhosted.org/packages/bb/c6/0a6d22ae1782f261fc4274ea9385b85bf792129d7126575ec2a71d8aea18/python-vagrant-0.5.15.tar.gz"
sha256 "af9a8a9802d382d45dbea96aa3cfbe77c6e6ad65b3fe7b7c799d41ab988179c6"
end
resource "PyYAML" do
url "https://files.pythonhosted.org/packages/64/c2/b80047c7ac2478f9501676c988a5411ed5572f35d1beff9cae07d321512c/PyYAML-5.3.1.tar.gz"
sha256 "b8eac752c5e14d3eca0e6dd9199cd627518cb5ec06add0de9d32baeee6fe645d"
end
resource "qrcode" do
url "https://files.pythonhosted.org/packages/19/d5/6c7d4e103d94364d067636417a77a6024219c58cd6e9f428ece9b5061ef9/qrcode-6.1.tar.gz"
sha256 "505253854f607f2abf4d16092c61d4e9d511a3b4392e60bff957a68592b04369"
end
resource "requests" do
url "https://files.pythonhosted.org/packages/da/67/672b422d9daf07365259958912ba533a0ecab839d4084c487a5fe9a5405f/requests-2.24.0.tar.gz"
sha256 "b3559a131db72c33ee969480840fff4bb6dd111de7dd27c8ee1f820f4f00231b"
end
resource "requests-oauthlib" do
url "https://files.pythonhosted.org/packages/23/eb/68fc8fa86e0f5789832f275c8289257d8dc44dbe93fce7ff819112b9df8f/requests-oauthlib-1.3.0.tar.gz"
sha256 "b4261601a71fd721a8bd6d7aa1cc1d6a8a93b4a9f5e96626f8e4d91e8beeaa6a"
end
resource "ruamel.yaml" do
url "https://files.pythonhosted.org/packages/16/8b/54a26c1031595e5edd0e616028b922d78d8ffba8bc775f0a4faeada846cc/ruamel.yaml-0.16.10.tar.gz"
sha256 "099c644a778bf72ffa00524f78dd0b6476bca94a1da344130f4bf3381ce5b954"
end
resource "ruamel.yaml.clib" do
url "https://files.pythonhosted.org/packages/92/28/612085de3fae9f82d62d80255d9f4cf05b1b341db1e180adcf28c1bf748d/ruamel.yaml.clib-0.2.0.tar.gz"
sha256 "b66832ea8077d9b3f6e311c4a53d06273db5dc2db6e8a908550f3c14d67e718c"
end
resource "six" do
url "https://files.pythonhosted.org/packages/6b/34/415834bfdafca3c5f451532e8a8d9ba89a21c9743a0c59fbd0205c7f9426/six-1.15.0.tar.gz"
sha256 "30639c035cdb23534cd4aa2dd52c3bf48f06e5f4a941509c8bafd8ce11080259"
end
resource "smmap" do
url "https://files.pythonhosted.org/packages/75/fb/2f594e5364f9c986b2c89eb662fc6067292cb3df2b88ae31c939b9138bb9/smmap-3.0.4.tar.gz"
sha256 "9c98bbd1f9786d22f14b3d4126894d56befb835ec90cef151af566c7e19b5d24"
end
resource "traitlets" do
url "https://files.pythonhosted.org/packages/75/b0/43deb021bc943f18f07cbe3dac1d681626a48997b7ffa1e7fb14ef922b21/traitlets-4.3.3.tar.gz"
sha256 "d023ee369ddd2763310e4c3eae1ff649689440d4ae59d7485eb4cfbbe3e359f7"
end
resource "urllib3" do
url "https://files.pythonhosted.org/packages/05/8c/40cd6949373e23081b3ea20d5594ae523e681b6f472e600fbc95ed046a36/urllib3-1.25.9.tar.gz"
sha256 "3018294ebefce6572a474f0604c2021e33b3fd8006ecd11d62107a5d2a963527"
end
resource "wcwidth" do
url "https://files.pythonhosted.org/packages/89/38/459b727c381504f361832b9e5ace19966de1a235d73cdbdea91c771a1155/wcwidth-0.2.5.tar.gz"
sha256 "c4d647b99872929fdb7bdcaa4fbe7f01413ed3d98077df798530e5b04f116c83"
end
resource "websocket_client" do
url "https://files.pythonhosted.org/packages/8b/0f/52de51b9b450ed52694208ab952d5af6ebbcbce7f166a48784095d930d8c/websocket_client-0.57.0.tar.gz"
sha256 "d735b91d6d1692a6a181f2a8c9e0238e5f6373356f561bb9dc4c7af36f452010"
end
# Builds the Python virtualenv for fdroidserver. The resources that need
# special build flags (Pillow, lxml, ptyprocess, cffi) are installed in a
# controlled order before the remaining pure-Python packages, then the
# fdroid entry points are linked. Fix: drops the redundant trailing
# `if OS.mac?` modifier on the sdkprefix assignment, which already sits
# inside an `if OS.mac?` branch.
def install
  bash_completion.install "completion/bash-completion" => "fdroid"
  venv = virtualenv_create(libexec, "python3")
  resource("Pillow").stage do
    inreplace "setup.py" do |s|
      zlib = Formula["zlib"].opt_prefix
      jpeg = Formula["jpeg"].opt_prefix
      freetype = Formula["freetype"].opt_prefix
      if OS.mac?
        # On macOS point zlib at the SDK (empty prefix when no SDK is needed).
        sdkprefix = MacOS.sdk_path_if_needed ? MacOS.sdk_path : ""
        s.gsub! "ZLIB_ROOT = None", "ZLIB_ROOT = ('#{sdkprefix}/usr/lib', '#{sdkprefix}/usr/include')"
      else
        s.gsub! "ZLIB_ROOT = None", "ZLIB_ROOT = ('#{zlib}/lib', '#{zlib}/include')"
      end
      # Hide openjpeg so Pillow does not pick up an incompatible JPEG 2000 lib.
      s.gsub! "openjpeg.h", "probably_not_a_header_called_this_eh.h"
      s.gsub! "JPEG_ROOT = None", "JPEG_ROOT = ('#{jpeg}/lib', '#{jpeg}/include')"
      s.gsub! "FREETYPE_ROOT = None", "FREETYPE_ROOT = ('#{freetype}/lib', '#{freetype}/include')"
    end

    # avoid triggering "helpful" distutils code that doesn't recognize Xcode 7 .tbd stubs
    ENV.delete "SDKROOT"
    if OS.mac? && !MacOS::CLT.installed?
      ENV.append "CFLAGS", "-I#{MacOS.sdk_path}/System/Library/Frameworks/Tk.framework/Versions/8.5/Headers"
    end
    venv.pip_install Pathname.pwd
  end

  # Fix "ld: file not found: /usr/lib/system/libsystem_darwin.dylib" for lxml
  if OS.mac?
    ENV["SDKROOT"] = MacOS.sdk_path if MacOS.version == :sierra
  end
  venv.pip_install resource("lxml")

  ENV.delete "SDKROOT" # avoid matplotlib build failure on 10.12

  resource("ptyprocess").stage do
    # Without removing this file, `pip` will ignore the `setup.py` file and
    # attempt to download the [`flit`](https://github.com/takluyver/flit)
    # build system.
    rm_f "pyproject.toml"
    venv.pip_install Pathname.pwd
  end

  venv.pip_install resource("cffi") # or bcrypt fails to build

  # Everything else installs straight from the resource list.
  res = resources.map(&:name).to_set - %w[cffi lxml Pillow ptyprocess]
  res.each do |r|
    venv.pip_install resource(r)
  end
  venv.pip_install_and_link buildpath
  doc.install "examples"
end
# Post-install note: fdroidserver needs the Android SDK build/platform
# tools, located via the ANDROID_HOME environment variable.
def caveats
  <<~EOS
    In order to function, fdroidserver requires that the Android SDK's
    "Build-tools" and "Platform-tools" are installed. Also, it is best if the
    base path of the Android SDK is set in the standard environment variable
    ANDROID_HOME. To install them from the command line, run:
    android update sdk --no-ui --all --filter tools,platform-tools,build-tools-25.0.0
  EOS
end
# Smoke test: scaffold a minimal repo with one fake metadata entry, then
# exercise the main fdroid subcommands against it.
test do
  # locales aren't set correctly within the testing environment
  ENV["LC_ALL"] = "en_US.UTF-8"
  ENV["LANG"] = "en_US.UTF-8"

  # fdroid prefers to work in a dir called 'fdroid'
  mkdir testpath/"fdroid" do
    mkdir "repo"
    mkdir "metadata"

    open("config.py", "w") do |f|
      f << "gradle = 'gradle'"
    end

    # Minimal fake app metadata for the lint/scanner commands to consume.
    open("metadata/fake.txt", "w") do |f|
      f << "License:GPL-3.0-or-later\n"
      f << "Summary:Yup still fake\n"
      f << "Categories:Internet\n"
      f << "Description:\n"
      f << "this is fake\n"
      f << ".\n"
    end

    system "#{bin}/fdroid", "checkupdates", "--verbose", "--allow-dirty"
    system "#{bin}/fdroid", "lint", "--verbose"
    system "#{bin}/fdroid", "rewritemeta", "fake", "--verbose"
    system "#{bin}/fdroid", "scanner", "--verbose"
  end
end
end
| 45.622951 | 147 | 0.792208 |
876ccc69d8d0ad50cada764b52fd2ffcef6437b4 | 26,171 | require 'carrierwave/orm/activerecord'
class User < ActiveRecord::Base
extend Gitlab::ConfigHelper
include Gitlab::ConfigHelper
include Gitlab::CurrentSettings
include Referable
include Sortable
include CaseSensitivity
include TokenAuthenticatable
DEFAULT_NOTIFICATION_LEVEL = :participating
add_authentication_token_field :authentication_token
default_value_for :admin, false
default_value_for :external, false
default_value_for :can_create_group, gitlab_config.default_can_create_group
default_value_for :can_create_team, false
default_value_for :hide_no_ssh_key, false
default_value_for :hide_no_password, false
default_value_for :theme_id, gitlab_config.default_theme
attr_encrypted :otp_secret,
key: Gitlab::Application.config.secret_key_base,
mode: :per_attribute_iv_and_salt,
algorithm: 'aes-256-cbc'
devise :two_factor_authenticatable,
otp_secret_encryption_key: Gitlab::Application.config.secret_key_base
devise :two_factor_backupable, otp_number_of_backup_codes: 10
serialize :otp_backup_codes, JSON
devise :lockable, :recoverable, :rememberable, :trackable,
:validatable, :omniauthable, :confirmable, :registerable
attr_accessor :force_random_password
# Virtual attribute for authenticating by either username or email
attr_accessor :login
#
# Relations
#
# Namespace for personal projects
has_one :namespace, -> { where type: nil }, dependent: :destroy, foreign_key: :owner_id, class_name: "Namespace"
# Profile
has_many :keys, dependent: :destroy
has_many :emails, dependent: :destroy
has_many :personal_access_tokens, dependent: :destroy
has_many :identities, dependent: :destroy, autosave: true
has_many :u2f_registrations, dependent: :destroy
# Groups
has_many :members, dependent: :destroy
has_many :group_members, dependent: :destroy, source: 'GroupMember'
has_many :groups, through: :group_members
has_many :owned_groups, -> { where members: { access_level: Gitlab::Access::OWNER } }, through: :group_members, source: :group
has_many :masters_groups, -> { where members: { access_level: Gitlab::Access::MASTER } }, through: :group_members, source: :group
# Projects
has_many :groups_projects, through: :groups, source: :projects
has_many :personal_projects, through: :namespace, source: :projects
has_many :project_members, dependent: :destroy, class_name: 'ProjectMember'
has_many :projects, through: :project_members
has_many :created_projects, foreign_key: :creator_id, class_name: 'Project'
has_many :users_star_projects, dependent: :destroy
has_many :starred_projects, through: :users_star_projects, source: :project
has_many :snippets, dependent: :destroy, foreign_key: :author_id, class_name: "Snippet"
has_many :issues, dependent: :destroy, foreign_key: :author_id
has_many :notes, dependent: :destroy, foreign_key: :author_id
has_many :merge_requests, dependent: :destroy, foreign_key: :author_id
has_many :events, dependent: :destroy, foreign_key: :author_id, class_name: "Event"
has_many :subscriptions, dependent: :destroy
has_many :recent_events, -> { order "id DESC" }, foreign_key: :author_id, class_name: "Event"
has_many :assigned_issues, dependent: :destroy, foreign_key: :assignee_id, class_name: "Issue"
has_many :assigned_merge_requests, dependent: :destroy, foreign_key: :assignee_id, class_name: "MergeRequest"
has_many :oauth_applications, class_name: 'Doorkeeper::Application', as: :owner, dependent: :destroy
has_one :abuse_report, dependent: :destroy
has_many :spam_logs, dependent: :destroy
has_many :builds, dependent: :nullify, class_name: 'Ci::Build'
has_many :todos, dependent: :destroy
has_many :notification_settings, dependent: :destroy
has_many :award_emoji, as: :awardable, dependent: :destroy
#
# Validations
#
validates :name, presence: true
validates :notification_email, presence: true, email: true
validates :public_email, presence: true, uniqueness: true, email: true, allow_blank: true
validates :bio, length: { maximum: 255 }, allow_blank: true
validates :projects_limit, presence: true, numericality: { greater_than_or_equal_to: 0 }
validates :username,
namespace: true,
presence: true,
uniqueness: { case_sensitive: false }
validate :namespace_uniq, if: ->(user) { user.username_changed? }
validate :avatar_type, if: ->(user) { user.avatar.present? && user.avatar_changed? }
validate :unique_email, if: ->(user) { user.email_changed? }
validate :owns_notification_email, if: ->(user) { user.notification_email_changed? }
validate :owns_public_email, if: ->(user) { user.public_email_changed? }
validates :avatar, file_size: { maximum: 200.kilobytes.to_i }
before_validation :generate_password, on: :create
before_validation :restricted_signup_domains, on: :create
before_validation :sanitize_attrs
before_validation :set_notification_email, if: ->(user) { user.email_changed? }
before_validation :set_public_email, if: ->(user) { user.public_email_changed? }
after_update :update_emails_with_primary_email, if: ->(user) { user.email_changed? }
before_save :ensure_authentication_token
before_save :ensure_external_user_rights
after_save :ensure_namespace_correct
after_initialize :set_projects_limit
before_create :check_confirmation_email
after_create :post_create_hook
after_destroy :post_destroy_hook
# User's Layout preference
enum layout: [:fixed, :fluid]
# User's Dashboard preference
# Note: When adding an option, it MUST go on the end of the array.
enum dashboard: [:projects, :stars, :project_activity, :starred_project_activity, :groups, :todos]
# User's Project preference
# Note: When adding an option, it MUST go on the end of the array.
enum project_view: [:readme, :activity, :files]
alias_attribute :private_token, :authentication_token
delegate :path, to: :namespace, allow_nil: true, prefix: true
state_machine :state, initial: :active do
event :block do
transition active: :blocked
transition ldap_blocked: :blocked
end
event :ldap_block do
transition active: :ldap_blocked
end
event :activate do
transition blocked: :active
transition ldap_blocked: :active
end
state :blocked, :ldap_blocked do
def blocked?
true
end
end
end
mount_uploader :avatar, AvatarUploader
# Scopes
scope :admins, -> { where(admin: true) }
scope :blocked, -> { with_states(:blocked, :ldap_blocked) }
scope :external, -> { where(external: true) }
scope :active, -> { with_state(:active) }
scope :not_in_project, ->(project) { project.users.present? ? where("id not in (:ids)", ids: project.users.map(&:id) ) : all }
scope :without_projects, -> { where('id NOT IN (SELECT DISTINCT(user_id) FROM members)') }
def self.with_two_factor
joins("LEFT OUTER JOIN u2f_registrations AS u2f ON u2f.user_id = users.id").
where("u2f.id IS NOT NULL OR otp_required_for_login = ?", true).distinct(arel_table[:id])
end
def self.without_two_factor
joins("LEFT OUTER JOIN u2f_registrations AS u2f ON u2f.user_id = users.id").
where("u2f.id IS NULL AND otp_required_for_login = ?", false)
end
#
# Class methods
#
class << self
# Devise method overridden to allow sign in with email or username
def find_for_database_authentication(warden_conditions)
conditions = warden_conditions.dup
if login = conditions.delete(:login)
where(conditions).find_by("lower(username) = :value OR lower(email) = :value", value: login.downcase)
else
find_by(conditions)
end
end
def sort(method)
case method.to_s
when 'recent_sign_in' then reorder(last_sign_in_at: :desc)
when 'oldest_sign_in' then reorder(last_sign_in_at: :asc)
else
order_by(method)
end
end
# Find a User by their primary email or any associated secondary email
def find_by_any_email(email)
sql = 'SELECT *
FROM users
WHERE id IN (
SELECT id FROM users WHERE email = :email
UNION
SELECT emails.user_id FROM emails WHERE email = :email
)
LIMIT 1;'
User.find_by_sql([sql, { email: email }]).first
end
def filter(filter_name)
case filter_name
when 'admins'
self.admins
when 'blocked'
self.blocked
when 'two_factor_disabled'
self.without_two_factor
when 'two_factor_enabled'
self.with_two_factor
when 'wop'
self.without_projects
when 'external'
self.external
else
self.active
end
end
# Searches users matching the given query.
#
# This method uses ILIKE on PostgreSQL and LIKE on MySQL.
#
# query - The search query as a String
#
# Returns an ActiveRecord::Relation.
def search(query)
table = arel_table
pattern = "%#{query}%"
where(
table[:name].matches(pattern).
or(table[:email].matches(pattern)).
or(table[:username].matches(pattern))
)
end
def by_login(login)
return nil unless login
if login.include?('@'.freeze)
unscoped.iwhere(email: login).take
else
unscoped.iwhere(username: login).take
end
end
def find_by_username!(username)
find_by!('lower(username) = ?', username.downcase)
end
def find_by_personal_access_token(token_string)
personal_access_token = PersonalAccessToken.active.find_by_token(token_string) if token_string
personal_access_token.user if personal_access_token
end
def by_username_or_id(name_or_id)
find_by('users.username = ? OR users.id = ?', name_or_id.to_s, name_or_id.to_i)
end
def build_user(attrs = {})
User.new(attrs)
end
def reference_prefix
'@'
end
# Pattern used to extract `@user` user references from text
def reference_pattern
%r{
#{Regexp.escape(reference_prefix)}
(?<user>#{Gitlab::Regex::NAMESPACE_REGEX_STR})
}x
end
end
#
# Instance methods
#
def to_param
username
end
def to_reference(_from_project = nil)
"#{self.class.reference_prefix}#{username}"
end
def generate_password
if self.force_random_password
self.password = self.password_confirmation = Devise.friendly_token.first(8)
end
end
def generate_reset_token
@reset_token, enc = Devise.token_generator.generate(self.class, :reset_password_token)
self.reset_password_token = enc
self.reset_password_sent_at = Time.now.utc
@reset_token
end
def check_confirmation_email
skip_confirmation! unless current_application_settings.send_user_confirmation_email
end
def recently_sent_password_reset?
reset_password_sent_at.present? && reset_password_sent_at >= 1.minute.ago
end
def disable_two_factor!
transaction do
update_attributes(
otp_required_for_login: false,
encrypted_otp_secret: nil,
encrypted_otp_secret_iv: nil,
encrypted_otp_secret_salt: nil,
otp_grace_period_started_at: nil,
otp_backup_codes: nil
)
self.u2f_registrations.destroy_all
end
end
def two_factor_enabled?
two_factor_otp_enabled? || two_factor_u2f_enabled?
end
def two_factor_otp_enabled?
self.otp_required_for_login?
end
def two_factor_u2f_enabled?
self.u2f_registrations.exists?
end
def namespace_uniq
# Return early if username already failed the first uniqueness validation
return if self.errors.key?(:username) &&
self.errors[:username].include?('has already been taken')
namespace_name = self.username
existing_namespace = Namespace.by_path(namespace_name)
if existing_namespace && existing_namespace != self.namespace
self.errors.add(:username, 'has already been taken')
end
end
def avatar_type
unless self.avatar.image?
self.errors.add :avatar, "only images allowed"
end
end
def unique_email
if !self.emails.exists?(email: self.email) && Email.exists?(email: self.email)
self.errors.add(:email, 'has already been taken')
end
end
def owns_notification_email
return if self.temp_oauth_email?
self.errors.add(:notification_email, "is not an email you own") unless self.all_emails.include?(self.notification_email)
end
def owns_public_email
return if self.public_email.blank?
self.errors.add(:public_email, "is not an email you own") unless self.all_emails.include?(self.public_email)
end
def update_emails_with_primary_email
primary_email_record = self.emails.find_by(email: self.email)
if primary_email_record
primary_email_record.destroy
self.emails.create(email: self.email_was)
self.update_secondary_emails!
end
end
# Returns the groups a user has access to
def authorized_groups
union = Gitlab::SQL::Union.
new([groups.select(:id), authorized_projects.select(:namespace_id)])
Group.where("namespaces.id IN (#{union.to_sql})")
end
# Returns projects user is authorized to access.
def authorized_projects(min_access_level = nil)
Project.where("projects.id IN (#{projects_union(min_access_level).to_sql})")
end
def viewable_starred_projects
starred_projects.where("projects.visibility_level IN (?) OR projects.id IN (#{projects_union.to_sql})",
[Project::PUBLIC, Project::INTERNAL])
end
def owned_projects
@owned_projects ||=
Project.where('namespace_id IN (?) OR namespace_id = ?',
owned_groups.select(:id), namespace.id).joins(:namespace)
end
def is_admin?
admin
end
def require_ssh_key?
keys.count == 0
end
def require_password?
password_automatically_set? && !ldap_user?
end
def can_change_username?
gitlab_config.username_changing_enabled
end
def can_create_project?
projects_limit_left > 0
end
def can_create_group?
can?(:create_group, nil)
end
def abilities
Ability.abilities
end
def can_select_namespace?
several_namespaces? || admin
end
def can?(action, subject)
abilities.allowed?(self, action, subject)
end
def first_name
name.split.first unless name.blank?
end
def cared_merge_requests
MergeRequest.cared(self)
end
def projects_limit_left
projects_limit - personal_projects.count
end
def projects_limit_percent
return 100 if projects_limit.zero?
(personal_projects.count.to_f / projects_limit) * 100
end
def recent_push(project_id = nil)
# Get push events not earlier than 2 hours ago
events = recent_events.code_push.where("created_at > ?", Time.now - 2.hours)
events = events.where(project_id: project_id) if project_id
# Use the latest event that has not been pushed or merged recently
events.recent.find do |event|
project = Project.find_by_id(event.project_id)
next unless project
if project.repository.branch_exists?(event.branch_name)
merge_requests = MergeRequest.where("created_at >= ?", event.created_at).
where(source_project_id: project.id,
source_branch: event.branch_name)
merge_requests.empty?
end
end
end
def projects_sorted_by_activity
authorized_projects.sorted_by_activity
end
def several_namespaces?
owned_groups.any? || masters_groups.any?
end
def namespace_id
namespace.try :id
end
def name_with_username
"#{name} (#{username})"
end
def already_forked?(project)
!!fork_of(project)
end
def fork_of(project)
links = ForkedProjectLink.where(forked_from_project_id: project, forked_to_project_id: personal_projects)
if links.any?
links.first.forked_to_project
else
nil
end
end
def ldap_user?
identities.exists?(["provider LIKE ? AND extern_uid IS NOT NULL", "ldap%"])
end
def ldap_identity
@ldap_identity ||= identities.find_by(["provider LIKE ?", "ldap%"])
end
def project_deploy_keys
DeployKey.unscoped.in_projects(self.authorized_projects.pluck(:id)).distinct(:id)
end
def accessible_deploy_keys
@accessible_deploy_keys ||= begin
key_ids = project_deploy_keys.pluck(:id)
key_ids.push(*DeployKey.are_public.pluck(:id))
DeployKey.where(id: key_ids)
end
end
def created_by
User.find_by(id: created_by_id) if created_by_id
end
def sanitize_attrs
%w(name username skype linkedin twitter).each do |attr|
value = self.send(attr)
self.send("#{attr}=", Sanitize.clean(value)) if value.present?
end
end
def set_notification_email
if self.notification_email.blank? || !self.all_emails.include?(self.notification_email)
self.notification_email = self.email
end
end
def set_public_email
if self.public_email.blank? || !self.all_emails.include?(self.public_email)
self.public_email = ''
end
end
def update_secondary_emails!
self.set_notification_email
self.set_public_email
self.save if self.notification_email_changed? || self.public_email_changed?
end
def set_projects_limit
connection_default_value_defined = new_record? && !projects_limit_changed?
return unless self.projects_limit.nil? || connection_default_value_defined
self.projects_limit = current_application_settings.default_projects_limit
end
def requires_ldap_check?
if !Gitlab.config.ldap.enabled
false
elsif ldap_user?
!last_credential_check_at || (last_credential_check_at + 1.hour) < Time.now
else
false
end
end
def try_obtain_ldap_lease
# After obtaining this lease LDAP checks will be blocked for 600 seconds
# (10 minutes) for this user.
lease = Gitlab::ExclusiveLease.new("user_ldap_check:#{id}", timeout: 600)
lease.try_obtain
end
def solo_owned_groups
@solo_owned_groups ||= owned_groups.select do |group|
group.owners == [self]
end
end
def with_defaults
User.defaults.each do |k, v|
self.send("#{k}=", v)
end
self
end
def can_leave_project?(project)
project.namespace != namespace &&
project.project_member(self)
end
# Reset project events cache related to this user
#
# Since we do cache @event we need to reset cache in special cases:
# * when the user changes their avatar
# Events cache stored like events/23-20130109142513.
# The cache key includes updated_at timestamp.
# Thus it will automatically generate a new fragment
# when the event is updated because the key changes.
def reset_events_cache
Event.where(author_id: self.id).
order('id DESC').limit(1000).
update_all(updated_at: Time.now)
end
def full_website_url
return "http://#{website_url}" if website_url !~ /\Ahttps?:\/\//
website_url
end
def short_website_url
website_url.sub(/\Ahttps?:\/\//, '')
end
def all_ssh_keys
keys.map(&:publishable_key)
end
def temp_oauth_email?
email.start_with?('temp-email-for-oauth')
end
def avatar_url(size = nil, scale = 2)
if avatar.present?
[gitlab_config.url, avatar.url].join
else
GravatarService.new.execute(email, size, scale)
end
end
def all_emails
all_emails = []
all_emails << self.email unless self.temp_oauth_email?
all_emails.concat(self.emails.map(&:email))
all_emails
end
def hook_attrs
{
name: name,
username: username,
avatar_url: avatar_url
}
end
def ensure_namespace_correct
# Ensure user has namespace
self.create_namespace!(path: self.username, name: self.username) unless self.namespace
if self.username_changed?
self.namespace.update_attributes(path: self.username, name: self.username)
end
end
def post_create_hook
log_info("User \"#{self.name}\" (#{self.email}) was created")
notification_service.new_user(self, @reset_token) if self.created_by_id
system_hook_service.execute_hooks_for(self, :create)
end
def post_destroy_hook
log_info("User \"#{self.name}\" (#{self.email}) was removed")
system_hook_service.execute_hooks_for(self, :destroy)
end
def notification_service
NotificationService.new
end
def log_info(message)
Gitlab::AppLogger.info message
end
def system_hook_service
SystemHooksService.new
end
def starred?(project)
starred_projects.exists?(project.id)
end
def toggle_star(project)
UsersStarProject.transaction do
user_star_project = users_star_projects.
where(project: project, user: self).lock(true).first
if user_star_project
user_star_project.destroy
else
UsersStarProject.create!(project: project, user: self)
end
end
end
def manageable_namespaces
@manageable_namespaces ||= [namespace] + owned_groups + masters_groups
end
def namespaces
namespace_ids = groups.pluck(:id)
namespace_ids.push(namespace.id)
Namespace.where(id: namespace_ids)
end
def oauth_authorized_tokens
Doorkeeper::AccessToken.where(resource_owner_id: self.id, revoked_at: nil)
end
# Returns the projects a user contributed to in the last year.
#
# This method relies on a subquery as this performs significantly better
# compared to a JOIN when coupled with, for example,
# `Project.visible_to_user`. That is, consider the following code:
#
# some_user.contributed_projects.visible_to_user(other_user)
#
# If this method were to use a JOIN the resulting query would take roughly 200
# ms on a database with a similar size to GitLab.com's database. On the other
# hand, using a subquery means we can get the exact same data in about 40 ms.
def contributed_projects
events = Event.select(:project_id).
contributions.where(author_id: self).
where("created_at > ?", Time.now - 1.year).
uniq.
reorder(nil)
Project.where(id: events)
end
def restricted_signup_domains
email_domains = current_application_settings.restricted_signup_domains
unless email_domains.blank?
match_found = email_domains.any? do |domain|
escaped = Regexp.escape(domain).gsub('\*','.*?')
regexp = Regexp.new "^#{escaped}$", Regexp::IGNORECASE
email_domain = Mail::Address.new(self.email).domain
email_domain =~ regexp
end
unless match_found
self.errors.add :email,
'is not whitelisted. ' +
'Email domains valid for registration are: ' +
email_domains.join(', ')
return false
end
end
true
end
def can_be_removed?
!solo_owned_groups.present?
end
def ci_authorized_runners
@ci_authorized_runners ||= begin
runner_ids = Ci::RunnerProject.
where("ci_runner_projects.gl_project_id IN (#{ci_projects_union.to_sql})").
select(:runner_id)
Ci::Runner.specific.where(id: runner_ids)
end
end
def notification_settings_for(source)
notification_settings.find_or_initialize_by(source: source)
end
# Lazy load global notification setting
# Initializes User setting with Participating level if setting not persisted
def global_notification_setting
return @global_notification_setting if defined?(@global_notification_setting)
@global_notification_setting = notification_settings.find_or_initialize_by(source: nil)
@global_notification_setting.update_attributes(level: NotificationSetting.levels[DEFAULT_NOTIFICATION_LEVEL]) unless @global_notification_setting.persisted?
@global_notification_setting
end
def assigned_open_merge_request_count(force: false)
Rails.cache.fetch(['users', id, 'assigned_open_merge_request_count'], force: force) do
assigned_merge_requests.opened.count
end
end
def assigned_open_issues_count(force: false)
Rails.cache.fetch(['users', id, 'assigned_open_issues_count'], force: force) do
assigned_issues.opened.count
end
end
def update_cache_counts
assigned_open_merge_request_count(force: true)
assigned_open_issues_count(force: true)
end
def todos_done_count(force: false)
Rails.cache.fetch(['users', id, 'todos_done_count'], force: force) do
todos.done.count
end
end
def todos_pending_count(force: false)
Rails.cache.fetch(['users', id, 'todos_pending_count'], force: force) do
todos.pending.count
end
end
def update_todos_count_cache
todos_done_count(force: true)
todos_pending_count(force: true)
end
private
def projects_union(min_access_level = nil)
relations = [personal_projects.select(:id),
groups_projects.select(:id),
projects.select(:id),
groups.joins(:shared_projects).select(:project_id)]
if min_access_level
scope = { access_level: Gitlab::Access.values.select { |access| access >= min_access_level } }
relations = [relations.shift] + relations.map { |relation| relation.where(members: scope) }
end
Gitlab::SQL::Union.new(relations)
end
def ci_projects_union
scope = { access_level: [Gitlab::Access::MASTER, Gitlab::Access::OWNER] }
groups = groups_projects.where(members: scope)
other = projects.where(members: scope)
Gitlab::SQL::Union.new([personal_projects.select(:id), groups.select(:id),
other.select(:id)])
end
# Added according to https://github.com/plataformatec/devise/blob/7df57d5081f9884849ca15e4fde179ef164a575f/README.md#activejob-integration
def send_devise_notification(notification, *args)
devise_mailer.send(notification, self, *args).deliver_later
end
def ensure_external_user_rights
return unless self.external?
self.can_create_group = false
self.projects_limit = 0
end
end
| 29.605204 | 160 | 0.704673 |
d589e24a47a1ea9743f6d1854aa8561f19a5e062 | 1,076 | require 'devise/hooks/timeoutable'
module Devise
  module Models
    # Timeoutable takes care of verifying whether a user session has already
    # expired or not. When a session expires after the configured time, the
    # user will be asked for credentials again, i.e. they will be redirected
    # to the sign in page.
    #
    # == Options
    #
    # Timeoutable adds the following options to devise_for:
    #
    #   * +timeout_in+: the interval to timeout the user session without activity.
    #
    # == Examples
    #
    #   user.timedout?(30.minutes.ago)
    #
    module Timeoutable
      extend ActiveSupport::Concern

      # Devise hook: this module needs no database columns on the model.
      def self.required_fields(klass)
        []
      end

      # Checks whether the user session has expired based on configured time.
      # Falsy when +timeout_in+ is nil (timeout disabled) or +last_access+ is
      # nil (no recorded activity); true when the last access is at least
      # +timeout_in+ in the past.
      def timedout?(last_access)
        !timeout_in.nil? && last_access && last_access <= timeout_in.ago
      end

      # Per-model timeout interval, delegated to the class-level Devise config.
      def timeout_in
        self.class.timeout_in
      end

      private

      # NOTE: the `private` marker above does not affect this nested module
      # definition — constants are not subject to method visibility. It is kept
      # for parity with upstream Devise. ActiveSupport::Concern mixes
      # ClassMethods into the including class, defining the `timeout_in`
      # class-level accessor there.
      module ClassMethods
        Devise::Models.config(self, :timeout_in)
      end
    end
  end
end
| 24.454545 | 82 | 0.64777 |
6a3b212b72b9c6ef6798147afc7f09bc6ade2b7b | 2,184 | # encoding: UTF-8
=begin
Assistant de demande des informations générales
=end
# Entry point of the wizard: prompts for each piece of general information
# (title, English title, description, publication date) that is still missing,
# then persists whatever was provided. +options+ is currently unused.
def exec(options = nil)
  # The general information we need to collect.
  clear
  notice "\n===Informations générales ==="
  ask_for_titre unless titre
  ask_for_titre_en unless titre_en
  ask_for_description unless description
  ask_for_publishing_date unless published_at
  # Persist the collected information, if any was entered.
  if titre || titre_en || description || published_at
    informations.set({titre: titre, titre_en:titre_en, description:description, published_at:published_at})
    # notice "Informations enregistrées."
  else
    notice "Aucune information pour le moment. Il faudra penser à les rentrer."
  end
end
# Prompts for the tutorial's human-readable (French) title and stores it in
# @titre; leaves it unset when the user gives no answer. The on-screen text is
# deliberately kept in French (user-facing).
def ask_for_titre
  puts <<-EOT
  = Titre humain =
  Nous devons déterminer le titre humain du tutoriel.
  Le choisir avec soin car il sera utilisé dans les
  annonces et autre.
  (mais vous pourrez toujours le redéfinir par :
  vitefait infos #{name} titre="new_titre")
  EOT
  res = prompt("Titre humain")
  if res.nil?
    puts "OK, pas de titre pour le moment…"
  else
    @titre = res
  end
end
# Prompts for the English title (used for the Scrivener forum) and stores it
# in @titre_en; leaves it unset when the user gives no answer.
def ask_for_titre_en
  puts <<-EOT
  = Titre anglais =
  J'ai besoin du titre anglais (pour le forum Scrivener).
  (tu pourras toujours le redéfinir par :
  vitefait infos #{name} titre_en='new_titre')
  EOT
  res = prompt("Titre anglais")
  if res.nil?
    puts "OK, pas de titre anglais pour le moment…"
  else
    @titre_en = res
  end
end
# Prompts for a one-sentence description (used alongside announcements) and
# stores it in @description; leaves it unset when the user gives no answer.
def ask_for_description
  puts <<-EOT
  = Description =
  Une description en une phrase, pour accompagner les
  messages.
  EOT
  res = prompt("Description")
  if res.nil?
    puts "OK, pas de description pour le moment…"
  else
    @description = res
  end
end
# Prompts for the planned publication date (expected format: "DD MM YYYY").
# Loops until the input passes Informations.published_date_valid? or the user
# answers nothing (nil), in which case no date is recorded. Always returns
# true. NOTE(review): an invalid date silently re-prompts with no error
# message — consider printing feedback before looping.
def ask_for_publishing_date
  puts <<-EOT
  = Date de publication =
  Quelle est la date prévue pour la publication ?
  Au format : JJ MM AAAA
  EOT
  while true
    date = prompt("Date publication")
    if date.nil?
      puts "OK, pas de date de publication pour le moment…"
      return true
    else
      if Informations.published_date_valid?(date)
        @published_at = date
        return true
      end
    end
  end
end
| 22.285714 | 107 | 0.705586 |
ac7b05fff234e2f1c2880c944f026e3ebf9fbb65 | 1,592 | require 'graphviz'
class PetriNet::CoverabilityGraph < PetriNet::Base
    # net     - the PetriNet whose coverability graph is being built (only its
    #           name is read here).
    # options - 'unlimited' (default true): when true, a marking that strictly
    #           covers an existing one is collapsed onto that node instead of
    #           raising; see #add_node.
    def initialize(net, options = Hash.new)
        @objects = Array.new  # all graph objects, indexed by id
        @nodes = Hash.new     # node name => id (index into @objects)
        @edges = Hash.new     # edge name => id
        @name = net.name
        if options['unlimited'].nil?
            @unlimited = true
        else
            @unlimited = options['unlimited']
        end
    end

    # Adds +node+ to the graph and reports how it relates to existing markings.
    #
    # Return conventions (kept from the original implementation):
    #   * positive id       - node was newly inserted
    #   * negative id       - an existing node already represents this marking
    #                         (equal, or covered when 'unlimited'); the existing
    #                         node's id is returned negated
    #   * -Float::INFINITY  - node is strictly smaller than an existing marking
    #   * false             - node failed validation or its name is taken
    # Raises PetriNet::Graph::InfiniteReachabilityGraphError when a covering
    # marking is found and the graph was built with 'unlimited' => false.
    def add_node(node)
        double = false
        inf = false
        @nodes.each_value do |n|
            begin
                # New marking strictly covers an existing one: unbounded growth.
                if node > @objects[n]
                    if @unlimited
                        double = n
                        break
                        #return @objects[n].id *-1
                    else
                        raise PetriNet::Graph::InfiniteReachabilityGraphError
                    end
                end
                # <=> yields -INFINITY when node is strictly below this marking
                # (presumably defined that way by the node class — see its <=>).
                if -Float::INFINITY == (node <=> @objects[n])
                    inf = true
                end
            rescue ArgumentError
                # incomparable markings are expected and simply skipped
            end
        end
        # if there was a smaller marking
        return (@objects[double].id * -1) if double
        node_index = @objects.index node
        # if there already is a node with this marking
        return @objects[node_index].id * -1 unless node_index.nil?
        return -Float::INFINITY if inf
        if (node.validate && ([email protected]? node.name))
            @objects[node.id] = node
            @nodes[node.name] = node.id
            node.graph = self
            return node.id
        end
        return false
    end
end
| 28.945455 | 77 | 0.485553 |
610f878f21595d1297633819e1d29e7cacec1484 | 377 | class User < ApplicationRecord
# Associations
has_many :businesses
has_many :donations
has_many :reviews
has_secure_password
# Validations.
# The original declared `validates :email` twice — once with presence plus a
# (case-sensitive) uniqueness check, once with case-insensitive uniqueness —
# registering duplicate, conflicting uniqueness validators. Merged into one
# declaration keeping presence and the case-insensitive uniqueness.
validates :email, presence: true, uniqueness: { case_sensitive: false }
# validates :name, :email, :password, presence: true
# # validates :password, length: { minimum: 6, allow_blank: true }
end
| 17.952381 | 66 | 0.763926 |
01932e224006de829401b71de14cc28fa7384a8b | 3,802 | =begin
#Topological Inventory
#Topological Inventory
OpenAPI spec version: 0.1.0
Contact: [email protected]
Generated by: https://openapi-generator.tech
OpenAPI Generator version: 3.3.4
=end
require 'spec_helper'
require 'json'
require 'date'
# Unit tests for TopologicalInventoryApiClient::ContainerNode
# Automatically generated by openapi-generator (https://openapi-generator.tech)
# Please update as you see appropriate
# Unit tests for TopologicalInventoryApiClient::ContainerNode.
# The generated file repeated the same describe/it placeholder sixteen times;
# the loop below registers identically-named example groups for each attribute.
describe 'ContainerNode' do
  before do
    # run before each test
    @instance = TopologicalInventoryApiClient::ContainerNode.new
  end

  after do
    # run after each test
  end

  describe 'test an instance of ContainerNode' do
    it 'should create an instance of ContainerNode' do
      expect(@instance).to be_instance_of(TopologicalInventoryApiClient::ContainerNode)
    end
  end

  %w[
    archived_at cpus created_at id last_seen_at lives_on_id lives_on_type
    memory name resource_version source_created_at source_deleted_at
    source_id source_ref taggings updated_at
  ].each do |attribute|
    describe "test attribute \"#{attribute}\"" do
      it 'should work' do
        # assertion here. ref: https://www.relishapp.com/rspec/rspec-expectations/docs/built-in-matchers
      end
    end
  end
end
| 28.80303 | 102 | 0.714361 |
d58a35b98b86eee6d549ac91add3ec34b97178d2 | 3,116 | class ShadowsocksLibev < Formula
desc "Libev port of shadowsocks"
homepage "https://github.com/shadowsocks/shadowsocks-libev"
url "https://github.com/shadowsocks/shadowsocks-libev/releases/download/v3.3.5/shadowsocks-libev-3.3.5.tar.gz"
sha256 "cfc8eded35360f4b67e18dc447b0c00cddb29cc57a3cec48b135e5fb87433488"
license "GPL-3.0"
bottle do
sha256 cellar: :any, catalina: "1c324cc200e2c895d672f36f239e2c48588ced81ea9716643a0b2b36757fb7e9"
sha256 cellar: :any, mojave: "83a23ecda43df6ef6097aa728de12f4dab8f1595cc9197ef8e29b4b1e5fd8822"
sha256 cellar: :any, high_sierra: "d6f9af357976033c8965e8b8bc7d52a8023b1ec797378f9dd292e74a43c0b134"
end
head do
url "https://github.com/shadowsocks/shadowsocks-libev.git"
depends_on "autoconf" => :build
depends_on "automake" => :build
depends_on "libtool" => :build
end
depends_on "asciidoc" => :build
depends_on "xmlto" => :build
depends_on "c-ares"
depends_on "libev"
depends_on "libsodium"
depends_on :macos # Due to Python 2
depends_on "mbedtls"
depends_on "pcre"
# Builds and installs shadowsocks-libev, and installs a default JSON client
# configuration into #{etc}.
def install
  # Point xmlto/asciidoc at Homebrew's XML catalog so doc generation works.
  ENV["XML_CATALOG_FILES"] = etc/"xml/catalog"
  # HEAD checkouts ship no configure script; generate it first.
  system "./autogen.sh" if build.head?
  system "./configure", "--prefix=#{prefix}"
  system "make"
  # Default config written before `make install` so it lands in etc via
  # `etc.install` (Homebrew preserves user edits on upgrade).
  (buildpath/"shadowsocks-libev.json").write <<~EOS
    {
      "server":"localhost",
      "server_port":8388,
      "local_port":1080,
      "password":"barfoo!",
      "timeout":600,
      "method":null
    }
  EOS
  etc.install "shadowsocks-libev.json"
  system "make", "install"
end
plist_options manual: "#{HOMEBREW_PREFIX}/opt/shadowsocks-libev/bin/ss-local -c #{HOMEBREW_PREFIX}/etc/shadowsocks-libev.json"
# launchd service definition: runs the local SOCKS5 client (`ss-local`)
# against the config installed in #{etc}, restarting it if it exits.
def plist
  <<~EOS
    <?xml version="1.0" encoding="UTF-8"?>
    <!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
    <plist version="1.0">
    <dict>
      <key>Label</key>
      <string>#{plist_name}</string>
      <key>ProgramArguments</key>
      <array>
        <string>#{opt_bin}/ss-local</string>
        <string>-c</string>
        <string>#{etc}/shadowsocks-libev.json</string>
      </array>
      <key>RunAtLoad</key>
      <true/>
      <key>KeepAlive</key>
      <true/>
    </dict>
    </plist>
  EOS
end
test do
server_port = free_port
local_port = free_port
(testpath/"shadowsocks-libev.json").write <<~EOS
{
"server":"127.0.0.1",
"server_port":#{server_port},
"local":"127.0.0.1",
"local_port":#{local_port},
"password":"test",
"timeout":600,
"method":null
}
EOS
server = fork { exec bin/"ss-server", "-c", testpath/"shadowsocks-libev.json" }
client = fork { exec bin/"ss-local", "-c", testpath/"shadowsocks-libev.json" }
sleep 3
begin
system "curl", "--socks5", "127.0.0.1:#{local_port}", "github.com"
ensure
Process.kill 9, server
Process.wait server
Process.kill 9, client
Process.wait client
end
end
end
| 29.396226 | 128 | 0.62163 |
ac465b3c069e051876f0eb5ca41e0f59cf539765 | 1,471 | require 'rails_helper'
# Model specs for Schools::OnBoarding::SubjectList.
#
# Covers:
# * attribute surface (#subject_ids)
# * input coercion (blank removal, string-to-integer conversion)
# * validations (at least one subject, ids must exist)
# * construction from a Bookings::School via .new_from_bookings_school
describe Schools::OnBoarding::SubjectList, type: :model do
  context '#attributes' do # fixed typo: was '#attibutes'
    it { is_expected.to respond_to :subject_ids }
  end
  context '#subject_ids' do
    it 'removes blank values' do
      expect(described_class.new(subject_ids: [""]).subject_ids).to eq []
    end
    it 'converts strings to integers' do
      expect(described_class.new(subject_ids: %w[2]).subject_ids).to eq [2]
    end
  end
  context 'validations' do
    context 'when no subjects selected' do
      subject { described_class.new subject_ids: [] }
      it 'is invalid' do
        expect(subject).not_to be_valid
        expect(subject.errors.full_messages).to eq ['Select at least one subject']
      end
    end
    context 'when non existent subject ids selected' do
      let :bookings_subject do
        FactoryBot.create :bookings_subject
      end
      # id + 1 is guaranteed not to exist because bookings_subject is the
      # only record created in this example.
      subject { described_class.new subject_ids: [bookings_subject.id + 1] }
      it 'is invalid' do
        expect(subject).not_to be_valid
        expect(subject.errors.full_messages).to eq ['Subject not available']
      end
    end
  end
  context '.new_from_bookings_school' do
    let :bookings_school do
      FactoryBot.create :bookings_school, :with_subjects
    end
    subject { described_class.new_from_bookings_school bookings_school }
    it 'sets the subjects from the bookings school' do
      expect(subject.subject_ids).to eq bookings_school.subject_ids
    end
  end
end
| 27.240741 | 82 | 0.692726 |
1115fcfc00c2864f64aeb82f863001bac1723165 | 46,809 | require File.expand_path('../../../../spec_helper', __FILE__)
module Pod
describe TargetIntegrator = Installer::UserProjectIntegrator::TargetIntegrator do
describe 'In general' do
# The project contains a `PBXReferenceProxy` in the build files of the
# frameworks build phase which implicitly checks for the robustness of
# the detection of the target.
#
before do
  # Copy the SampleProject fixture so each example mutates its own project.
  project_path = SpecHelper.create_sample_app_copy_from_fixture('SampleProject')
  @project = Xcodeproj::Project.open(project_path)
  # An (empty) Pods project must exist on disk for the integrator to run.
  Project.new(config.sandbox.project_path).save
  @target = @project.targets.first
  target_definition = Podfile::TargetDefinition.new('Pods', nil)
  target_definition.abstract = false
  user_build_configurations = { 'Release' => :release, 'Debug' => :debug }
  # Aggregate target under test: static library build type targeting the
  # first native target of the sample project.
  @pod_bundle = AggregateTarget.new(config.sandbox, BuildType.static_library, user_build_configurations, [],
                                    Platform.ios, target_definition, project_path.dirname, @project,
                                    [@target.uuid], {})
  # Default stubs: one resource and one framework, Release-only.
  @pod_bundle.stubs(:resource_paths_by_config).returns('Release' => %w(${PODS_ROOT}/Lib/Resources/image.png))
  @pod_bundle.stubs(:framework_paths_by_config).returns('Release' => [Xcode::FrameworkPaths.new('${PODS_BUILD_DIR}/Lib/Lib.framework')])
  configuration = Xcodeproj::Config.new(
    'GCC_PREPROCESSOR_DEFINITIONS' => '$(inherited) COCOAPODS=1',
  )
  @pod_bundle.xcconfigs['Debug'] = configuration
  @pod_bundle.xcconfigs['Release'] = configuration
  @target_integrator = TargetIntegrator.new(@pod_bundle)
  # Full display names of the CocoaPods-managed script phases, used by the
  # examples below to locate phases on the native target.
  @phase_prefix = TargetIntegrator::BUILD_PHASE_PREFIX
  @user_phase_prefix = TargetIntegrator::USER_BUILD_PHASE_PREFIX
  @embed_framework_phase_name = @phase_prefix + TargetIntegrator::EMBED_FRAMEWORK_PHASE_NAME
  @copy_pods_resources_phase_name = @phase_prefix + TargetIntegrator::COPY_PODS_RESOURCES_PHASE_NAME
  @check_manifest_phase_name = @phase_prefix + TargetIntegrator::CHECK_MANIFEST_PHASE_NAME
  @user_script_phase_name = @user_phase_prefix + 'Custom Script'
end
describe '#integrate!' do
# --- xcconfig hand-off and "Link Binary With Libraries" wiring -------------
it 'set the CocoaPods xcconfigs' do
  TargetIntegrator::XCConfigIntegrator.expects(:integrate).with(@pod_bundle, [@target])
  @target_integrator.integrate!
end
it 'allows the xcconfig integrator to edit already integrated targets if needed' do
  TargetIntegrator::XCConfigIntegrator.expects(:integrate).with(@pod_bundle, [@target])
  @target_integrator.integrate!
end
it 'adds references to the Pods static libraries to the Frameworks group' do
  @target_integrator.integrate!
  @target_integrator.send(:user_project)['Frameworks/libPods.a'].should.not.be.nil
end
it 'adds the libPods static library to the "Link binary with libraries" build phase of each target' do
  @target_integrator.integrate!
  target = @target_integrator.send(:native_targets).first
  phase = target.frameworks_build_phase
  build_file = phase.files.find { |f| f.file_ref.path == 'libPods.a' }
  build_file.should.not.be.nil
end
# Re-integrating after the aggregate target's product type flips between
# static library and dynamic framework must swap the linked product.
it 'deletes old product type references if the product type has changed' do
  @target_integrator.integrate!
  target = @target_integrator.send(:native_targets).first
  phase = target.frameworks_build_phase
  phase.files.find { |f| f.file_ref.path == 'libPods.a' }.should.not.be.nil
  phase.files.find { |f| f.file_ref.path == 'Pods.framework' }.should.be.nil
  @pod_bundle.stubs(:build_type => BuildType.dynamic_framework)
  @target_integrator.integrate!
  phase.files.find { |f| f.file_ref.path == 'libPods.a' }.should.be.nil
  phase.files.find { |f| f.file_ref.path == 'Pods.framework' }.should.not.be.nil
end
it 'cleans up linked libraries and frameworks from the frameworks build phase' do
  @pod_bundle.stubs(:build_type => BuildType.dynamic_framework)
  @target_integrator.integrate!
  target = @target_integrator.send(:native_targets).first
  phase = target.frameworks_build_phase
  phase.files.find { |f| f.file_ref.path == 'Pods.framework' }.should.not.be.nil
  phase.files.find { |f| f.file_ref.path == 'Pods-Something.framework' }.should.be.nil
  @pod_bundle.stubs(:product_name => 'Pods-Something.framework')
  @pod_bundle.stubs(:product_basename => 'Pods-Something')
  @target_integrator.integrate!
  phase.files.find { |f| f.file_ref.path == 'Pods.framework' }.should.be.nil
  phase.files.find { |f| f.file_ref.path == 'Pods-Something.framework' }.should.not.be.nil
end
it 'adds references to the Pods static framework to the Frameworks group' do
  @pod_bundle.stubs(:build_type => BuildType.dynamic_framework)
  @target_integrator.integrate!
  @target_integrator.send(:user_project)['Frameworks/Pods.framework'].should.not.be.nil
end
it 'adds the Pods static framework to the "Link binary with libraries" build phase of each target' do
  @pod_bundle.stubs(:build_type => BuildType.dynamic_framework)
  @target_integrator.integrate!
  target = @target_integrator.send(:native_targets).first
  phase = target.frameworks_build_phase
  build_file = phase.files.find { |f| f.file_ref.path == 'Pods.framework' }
  build_file.should.not.be.nil
end
# --- "[CP] Copy Pods Resources" and "[CP] Check Pods Manifest.lock" --------
it 'adds a Copy Pods Resources build phase to each target' do
  @target_integrator.integrate!
  target = @target_integrator.send(:native_targets).first
  phase_name = @copy_pods_resources_phase_name
  phase = target.shell_script_build_phases.find { |bp| bp.name == phase_name }
  phase.shell_script.strip.should == '"${PODS_ROOT}/Target Support Files/Pods/Pods-resources.sh"'
end
it 'adds a Check Manifest.lock build phase to each target' do
  @target_integrator.integrate!
  target = @target_integrator.send(:native_targets).first
  phase_name = @check_manifest_phase_name
  phase = target.shell_script_build_phases.find { |bp| bp.name == phase_name }
  phase.shell_script.should == <<-EOS.strip_heredoc
    diff "${PODS_PODFILE_DIR_PATH}/Podfile.lock" "${PODS_ROOT}/Manifest.lock" > /dev/null
    if [ $? != 0 ] ; then
      # print error to STDERR
      echo "error: The sandbox is not in sync with the Podfile.lock. Run 'pod install' or update your CocoaPods installation." >&2
      exit 1
    fi
    # This output is used by Xcode 'outputs' to avoid re-running this script phase.
    echo "SUCCESS" > "${SCRIPT_OUTPUT_FILE_0}"
  EOS
end
# The manifest check must run before anything else is built.
it 'adds the Check Manifest.lock build phase as the first build phase' do
  @target_integrator.integrate!
  target = @target_integrator.send(:native_targets).first
  target.build_phases.first
  phase_name = @check_manifest_phase_name
  phase = target.build_phases.find { |bp| bp.name == phase_name }
  target.build_phases.first.should.equal? phase
end
it 'does not perform the integration if there are no targets to integrate' do
  Installer::UserProjectIntegrator::TargetIntegrator::XCConfigIntegrator.
    integrate(@pod_bundle, @target_integrator.send(:native_targets))
  @target_integrator.stubs(:native_targets).returns([])
  frameworks = @target_integrator.send(:user_project).frameworks_group.children
  @target_integrator.integrate!
  @target_integrator.send(:user_project).frameworks_group.children.should == frameworks
end
# --- "[CP] Embed Pods Frameworks" presence per product/symbol type ---------
# The phase is added to runnable host products (apps, messages apps, app
# clips, watch2 extensions, UI test bundles) and omitted for products that
# cannot embed frameworks themselves (frameworks, app/watch extensions,
# messages extensions hosted in an iOS app).
it 'adds an embed frameworks build phase if frameworks are used' do
  @pod_bundle.stubs(:build_type => BuildType.dynamic_framework)
  @target_integrator.integrate!
  target = @target_integrator.send(:native_targets).first
  phase = target.shell_script_build_phases.find { |bp| bp.name == @embed_framework_phase_name }
  phase.nil?.should == false
end
it 'adds an embed frameworks build phase by default' do
  @target_integrator.integrate!
  target = @target_integrator.send(:native_targets).first
  phase = target.shell_script_build_phases.find { |bp| bp.name == @embed_framework_phase_name }
  phase.nil?.should == false
end
it 'adds an embed frameworks build phase if the target to integrate is a messages application' do
  @pod_bundle.stubs(:build_type => BuildType.dynamic_framework)
  target = @target_integrator.send(:native_targets).first
  target.stubs(:symbol_type).returns(:messages_application)
  @target_integrator.integrate!
  phase = target.shell_script_build_phases.find { |bp| bp.name == @embed_framework_phase_name }
  phase.nil?.should == false
end
it 'adds an embed frameworks build phase if the target to integrate is an app clip' do
  @pod_bundle.stubs(:build_type => BuildType.dynamic_framework)
  target = @target_integrator.send(:native_targets).first
  target.stubs(:symbol_type).returns(:application_on_demand_install_capable)
  @target_integrator.integrate!
  phase = target.shell_script_build_phases.find { |bp| bp.name == @embed_framework_phase_name }
  phase.nil?.should == false
end
it 'does not add an embed frameworks build phase if the target to integrate is a framework' do
  @pod_bundle.stubs(:build_type => BuildType.dynamic_framework)
  target = @target_integrator.send(:native_targets).first
  target.stubs(:symbol_type).returns(:framework)
  @target_integrator.integrate!
  phase = target.shell_script_build_phases.find { |bp| bp.name == @embed_framework_phase_name }
  phase.nil?.should == true
end
it 'does not add an embed frameworks build phase if the target to integrate is an app extension' do
  @pod_bundle.stubs(:build_type => BuildType.dynamic_framework)
  target = @target_integrator.send(:native_targets).first
  target.stubs(:symbol_type).returns(:app_extension)
  @target_integrator.integrate!
  phase = target.shell_script_build_phases.find { |bp| bp.name == @embed_framework_phase_name }
  phase.nil?.should == true
end
it 'does not add an embed frameworks build phase if the target to integrate is a watch extension' do
  @pod_bundle.stubs(:build_type => BuildType.dynamic_framework)
  target = @target_integrator.send(:native_targets).first
  target.stubs(:symbol_type).returns(:watch_extension)
  @target_integrator.integrate!
  phase = target.shell_script_build_phases.find { |bp| bp.name == @embed_framework_phase_name }
  phase.nil?.should == true
end
it 'adds an embed frameworks build phase if the target to integrate is a watchOS 2 extension' do
  @pod_bundle.stubs(:build_type => BuildType.dynamic_framework)
  target = @target_integrator.send(:native_targets).first
  target.stubs(:symbol_type).returns(:watch2_extension)
  @target_integrator.integrate!
  phase = target.shell_script_build_phases.find { |bp| bp.name == @embed_framework_phase_name }
  phase.nil?.should == false
end
it 'does not add an embed frameworks build phase if the target to integrate is a messages extension' do
  @pod_bundle.stubs(:build_type => BuildType.dynamic_framework)
  target = @target_integrator.send(:native_targets).first
  target.stubs(:symbol_type).returns(:messages_extension)
  @target_integrator.integrate!
  phase = target.shell_script_build_phases.find { |bp| bp.name == @embed_framework_phase_name }
  phase.nil?.should == true
end
it 'adds an embed frameworks build phase if the target to integrate is a UI Test bundle' do
  @pod_bundle.stubs(:build_type => BuildType.dynamic_framework)
  target = @target_integrator.send(:native_targets).first
  target.stubs(:symbol_type).returns(:ui_test_bundle)
  @target_integrator.integrate!
  phase = target.shell_script_build_phases.find { |bp| bp.name == @embed_framework_phase_name }
  phase.nil?.should == false
end
# --- Re-integration: retention/removal of the embed frameworks phase -------
it 'does not remove existing embed frameworks build phases from integrated framework targets' do
  @pod_bundle.stubs(:build_type => BuildType.dynamic_framework)
  @target_integrator.integrate!
  @pod_bundle.stubs(:requires_frameworks? => false)
  target = @target_integrator.send(:native_targets).first
  @target_integrator.integrate!
  phase = target.shell_script_build_phases.find { |bp| bp.name == @embed_framework_phase_name }
  phase.should.not.be.nil
end
it 'does not remove existing embed frameworks build phases if frameworks are not used anymore' do
  @pod_bundle.stubs(:build_type => BuildType.dynamic_framework)
  @target_integrator.integrate!
  @pod_bundle.stubs(:requires_frameworks? => false)
  @target_integrator.integrate!
  target = @target_integrator.send(:native_targets).first
  phase = target.shell_script_build_phases.find { |bp| bp.name == @embed_framework_phase_name }
  phase.nil?.should == false
end
# For target types that cannot embed frameworks, a previously added phase
# must be removed on the next integration.
it 'removes embed frameworks build phases from app extension targets' do
  @pod_bundle.stubs(:build_type => BuildType.dynamic_framework)
  @target_integrator.integrate!
  target = @target_integrator.send(:native_targets).first
  phase = target.shell_script_build_phases.find { |bp| bp.name == @embed_framework_phase_name }
  phase.nil?.should == false
  target.stubs(:symbol_type).returns(:app_extension)
  @target_integrator.integrate!
  phase = target.shell_script_build_phases.find { |bp| bp.name == @embed_framework_phase_name }
  phase.nil?.should == true
end
it 'removes embed frameworks build phases from watch extension targets' do
  @pod_bundle.stubs(:build_type => BuildType.dynamic_framework)
  @target_integrator.integrate!
  target = @target_integrator.send(:native_targets).first
  phase = target.shell_script_build_phases.find { |bp| bp.name == @embed_framework_phase_name }
  phase.nil?.should == false
  target.stubs(:symbol_type).returns(:watch_extension)
  @target_integrator.integrate!
  phase = target.shell_script_build_phases.find { |bp| bp.name == @embed_framework_phase_name }
  phase.nil?.should == true
end
it 'removes embed frameworks build phases from messages extension targets that are used in an iOS app' do
  @pod_bundle.stubs(:build_type => BuildType.dynamic_framework)
  @target_integrator.integrate!
  target = @target_integrator.send(:native_targets).first
  phase = target.shell_script_build_phases.find { |bp| bp.name == @embed_framework_phase_name }
  phase.nil?.should == false
  target.stubs(:symbol_type).returns(:messages_extension)
  @target_integrator.integrate!
  phase = target.shell_script_build_phases.find { |bp| bp.name == @embed_framework_phase_name }
  phase.nil?.should == true
end
it 'does not remove embed frameworks build phases from messages extension targets that are used in a messages app' do
  @pod_bundle.stubs(:build_type => BuildType.dynamic_framework)
  @target_integrator.integrate!
  target = @target_integrator.send(:native_targets).first
  phase = target.shell_script_build_phases.find { |bp| bp.name == @embed_framework_phase_name }
  phase.nil?.should == false
  target.stubs(:symbol_type).returns(:messages_extension)
  @pod_bundle.stubs(:requires_host_target? => false) # Messages extensions for messages applications do not require a host target
  @target_integrator.integrate!
  phase = target.shell_script_build_phases.find { |bp| bp.name == @embed_framework_phase_name }
  phase.nil?.should == false
end
it 'removes embed frameworks build phases from framework targets' do
  @pod_bundle.stubs(:build_type => BuildType.dynamic_framework)
  @target_integrator.integrate!
  target = @target_integrator.send(:native_targets).first
  phase = target.shell_script_build_phases.find { |bp| bp.name == @embed_framework_phase_name }
  phase.nil?.should == false
  target.stubs(:symbol_type).returns(:framework)
  @target_integrator.integrate!
  phase = target.shell_script_build_phases.find { |bp| bp.name == @embed_framework_phase_name }
  phase.nil?.should == true
end
# --- Copy Pods Resources phase: script input/output path generation --------
it 'does not add copy pods resources script phase with no resources' do
  @pod_bundle.stubs(:resource_paths_by_config => { 'Debug' => [], 'Release' => [] })
  @target_integrator.integrate!
  target = @target_integrator.send(:native_targets).first
  phase = target.shell_script_build_phases.find { |bp| bp.name == @copy_pods_resources_phase_name }
  phase.should.be.nil
end
it 'removes copy resources phase if it becomes empty' do
  @target_integrator.integrate!
  target = @target_integrator.send(:native_targets).first
  phase = target.shell_script_build_phases.find { |bp| bp.name == @copy_pods_resources_phase_name }
  phase.input_paths.sort.should == %w(
    ${PODS_ROOT}/Lib/Resources/image.png
    ${PODS_ROOT}/Target\ Support\ Files/Pods/Pods-resources.sh
  )
  # Now pretend the same target has no more framework paths, it should update the targets input/output paths
  @pod_bundle.stubs(:resource_paths_by_config => {})
  @target_integrator.integrate!
  target = @target_integrator.send(:native_targets).first
  phase = target.shell_script_build_phases.find { |bp| bp.name == @copy_pods_resources_phase_name }
  phase.should.be.nil
end
# Xcode has a practical cap on enumerated script-phase paths; beyond it the
# integrator falls back to empty input/output lists.
it 'clears input and output paths from script phase if it exceeds limit' do
  # The paths represented here will be 501 for input paths and 501 for output paths which will exceed the limit.
  paths = (0..500).map do |i|
    "${PODS_CONFIGURATION_BUILD_DIR}/DebugLib/DebugLibPng#{i}.png"
  end
  resource_paths_by_config = {
    'Debug' => paths,
    'Release' => paths,
  }
  @pod_bundle.stubs(:resource_paths_by_config => resource_paths_by_config)
  @target_integrator.integrate!
  target = @target_integrator.send(:native_targets).first
  phase = target.shell_script_build_phases.find { |bp| bp.name == @copy_pods_resources_phase_name }
  phase.input_paths.should == []
  phase.output_paths.should == []
end
# Output paths are derived from input resources: xcassets compile to
# Assets.car, xcdatamodel(d) to mom(d), xcmappingmodel to cdm, storyboards
# to storyboardc, xibs to nibs; bundles copy through unchanged.
it 'adds copy pods resources input and output paths' do
  resource_paths_by_config = {
    'Debug' => [
      '${PODS_CONFIGURATION_BUILD_DIR}/DebugLib/DebugAssets.xcassets',
      '${PODS_CONFIGURATION_BUILD_DIR}/DebugLib/DebugDataModel.xcdatamodeld',
      '${PODS_CONFIGURATION_BUILD_DIR}/DebugLib/DebugDataModel.xcdatamodel',
      '${PODS_CONFIGURATION_BUILD_DIR}/DebugLib/DebugMappingModel.xcmappingmodel',
      '${PODS_CONFIGURATION_BUILD_DIR}/DebugLib/DebugLib.bundle',
    ],
    'Release' => [
      '${PODS_CONFIGURATION_BUILD_DIR}/ReleaseLib/ReleaseLib.bundle',
      '${PODS_CONFIGURATION_BUILD_DIR}/ReleaseLib/ReleaseLib.storyboard',
      '${PODS_CONFIGURATION_BUILD_DIR}/ReleaseLib/ReleaseLibXIB.xib',
    ],
  }
  @pod_bundle.stubs(:resource_paths_by_config => resource_paths_by_config)
  @target_integrator.integrate!
  target = @target_integrator.send(:native_targets).first
  phase = target.shell_script_build_phases.find { |bp| bp.name == @copy_pods_resources_phase_name }
  phase.input_paths.sort.should == %w(
    ${PODS_CONFIGURATION_BUILD_DIR}/DebugLib/DebugAssets.xcassets
    ${PODS_CONFIGURATION_BUILD_DIR}/DebugLib/DebugDataModel.xcdatamodel
    ${PODS_CONFIGURATION_BUILD_DIR}/DebugLib/DebugDataModel.xcdatamodeld
    ${PODS_CONFIGURATION_BUILD_DIR}/DebugLib/DebugLib.bundle
    ${PODS_CONFIGURATION_BUILD_DIR}/DebugLib/DebugMappingModel.xcmappingmodel
    ${PODS_CONFIGURATION_BUILD_DIR}/ReleaseLib/ReleaseLib.bundle
    ${PODS_CONFIGURATION_BUILD_DIR}/ReleaseLib/ReleaseLib.storyboard
    ${PODS_CONFIGURATION_BUILD_DIR}/ReleaseLib/ReleaseLibXIB.xib
    ${PODS_ROOT}/Target\ Support\ Files/Pods/Pods-resources.sh
  )
  phase.output_paths.sort.should == %w(
    ${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}/Assets.car
    ${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}/DebugDataModel.mom
    ${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}/DebugDataModel.momd
    ${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}/DebugLib.bundle
    ${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}/DebugMappingModel.cdm
    ${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}/ReleaseLib.bundle
    ${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}/ReleaseLib.storyboardc
    ${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}/ReleaseLibXIB.nib
  )
end
it 'adds copy pods resources input and output paths without duplicates' do
  resource_paths_by_config = {
    'Debug' => [
      '${PODS_CONFIGURATION_BUILD_DIR}/DebugLib/SomeBundle.bundle',
    ],
    'Release' => [
      '${PODS_CONFIGURATION_BUILD_DIR}/ReleaseLib/SomeBundle.bundle',
    ],
  }
  @pod_bundle.stubs(:resource_paths_by_config => resource_paths_by_config)
  @target_integrator.integrate!
  target = @target_integrator.send(:native_targets).first
  phase = target.shell_script_build_phases.find { |bp| bp.name == @copy_pods_resources_phase_name }
  phase.input_paths.sort.should == %w(
    ${PODS_CONFIGURATION_BUILD_DIR}/DebugLib/SomeBundle.bundle
    ${PODS_CONFIGURATION_BUILD_DIR}/ReleaseLib/SomeBundle.bundle
    ${PODS_ROOT}/Target\ Support\ Files/Pods/Pods-resources.sh
  )
  phase.output_paths.sort.should == %w(
    ${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}/SomeBundle.bundle
  )
end
# --- Embed Pods Frameworks phase: script input/output path generation ------
it 'does not add embed frameworks build phase with no frameworks' do
  @pod_bundle.stubs(:framework_paths_by_config => { 'Debug' => {}, 'Release' => {} })
  @target_integrator.integrate!
  target = @target_integrator.send(:native_targets).first
  phase = target.shell_script_build_phases.find { |bp| bp.name == @embed_framework_phase_name }
  phase.should.be.nil
end
it 'removes embed frameworks phase if it becomes empty' do
  debug_non_vendored_framework = Xcode::FrameworkPaths.new('${BUILT_PRODUCTS_DIR}/DebugCompiledFramework/DebugCompiledFramework.framework')
  framework_paths_by_config = {
    'Debug' => [debug_non_vendored_framework],
  }
  @pod_bundle.stubs(:framework_paths_by_config => framework_paths_by_config)
  @target_integrator.integrate!
  target = @target_integrator.send(:native_targets).first
  phase = target.shell_script_build_phases.find { |bp| bp.name == @embed_framework_phase_name }
  phase.input_paths.sort.should == %w(
    ${BUILT_PRODUCTS_DIR}/DebugCompiledFramework/DebugCompiledFramework.framework
    ${PODS_ROOT}/Target\ Support\ Files/Pods/Pods-frameworks.sh
  )
  # Now pretend the same target has no more framework paths, it should remove the script phase
  @pod_bundle.stubs(:framework_paths_by_config => {})
  @target_integrator.integrate!
  target = @target_integrator.send(:native_targets).first
  phase = target.shell_script_build_phases.find { |bp| bp.name == @embed_framework_phase_name }
  phase.should.be.nil
end
it 'adds embed frameworks build phase input and output paths for vendored and non vendored frameworks' do
  debug_vendored_framework = Xcode::FrameworkPaths.new('${PODS_ROOT}/DebugVendoredFramework/ios/DebugVendoredFramework.framework',
                                                       '${PODS_ROOT}/DebugVendoredFramework/ios/DebugVendoredFramework.framework.dSYM',
                                                       ['${PODS_ROOT}/DebugVendoredFramework/ios/A6621399-62A0-3DC3-A6E3-B6B51BD287AD.bcsymbolmap'])
  debug_non_vendored_framework = Xcode::FrameworkPaths.new('${BUILT_PRODUCTS_DIR}/DebugCompiledFramework/DebugCompiledFramework.framework')
  release_vendored_framework = Xcode::FrameworkPaths.new('${PODS_ROOT}/ReleaseVendoredFramework/ios/ReleaseVendoredFramework.framework',
                                                         '${PODS_ROOT}/ReleaseVendoredFramework/ios/ReleaseVendoredFramework.framework.dSYM')
  framework_paths_by_config = {
    'Debug' => [debug_vendored_framework, debug_non_vendored_framework],
    'Release' => [release_vendored_framework],
  }
  @pod_bundle.stubs(:framework_paths_by_config => framework_paths_by_config)
  @target_integrator.integrate!
  target = @target_integrator.send(:native_targets).first
  phase = target.shell_script_build_phases.find { |bp| bp.name == @embed_framework_phase_name }
  # dSYM and bcsymbolmaps are intentionally excluded as they are handled by a different script phase within
  # the pod target.
  phase.input_paths.sort.should == %w(
    ${BUILT_PRODUCTS_DIR}/DebugCompiledFramework/DebugCompiledFramework.framework
    ${PODS_ROOT}/DebugVendoredFramework/ios/DebugVendoredFramework.framework
    ${PODS_ROOT}/ReleaseVendoredFramework/ios/ReleaseVendoredFramework.framework
    ${PODS_ROOT}/Target\ Support\ Files/Pods/Pods-frameworks.sh
  )
  phase.output_paths.sort.should == %w(
    ${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}/DebugCompiledFramework.framework
    ${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}/DebugVendoredFramework.framework
    ${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}/ReleaseVendoredFramework.framework
  )
end
it 'adds embed frameworks build phase input and output paths for vendored and non vendored frameworks without duplicate' do
  debug_vendored_framework = Xcode::FrameworkPaths.new('${PODS_ROOT}/DebugVendoredFramework/ios/SomeFramework.framework',
                                                       '${PODS_ROOT}/DebugVendoredFramework/ios/SomeFramework.framework.dSYM')
  debug_non_vendored_framework = Xcode::FrameworkPaths.new('${BUILT_PRODUCTS_DIR}/DebugCompiledFramework/CompiledFramework.framework')
  release_vendored_framework = Xcode::FrameworkPaths.new('${PODS_ROOT}/ReleaseVendoredFramework/ios/SomeFramework.framework',
                                                         '${PODS_ROOT}/ReleaseVendoredFramework/ios/SomeFramework.framework.dSYM')
  release_non_vendored_framework = Xcode::FrameworkPaths.new('${BUILT_PRODUCTS_DIR}/ReleaseCompiledFramework/CompiledFramework.framework')
  framework_paths_by_config = {
    'Debug' => [debug_vendored_framework, debug_non_vendored_framework],
    'Release' => [release_vendored_framework, release_non_vendored_framework],
  }
  @pod_bundle.stubs(:framework_paths_by_config => framework_paths_by_config)
  @target_integrator.integrate!
  target = @target_integrator.send(:native_targets).first
  phase = target.shell_script_build_phases.find { |bp| bp.name == @embed_framework_phase_name }
  phase.input_paths.sort.should == %w(
    ${BUILT_PRODUCTS_DIR}/DebugCompiledFramework/CompiledFramework.framework
    ${BUILT_PRODUCTS_DIR}/ReleaseCompiledFramework/CompiledFramework.framework
    ${PODS_ROOT}/DebugVendoredFramework/ios/SomeFramework.framework
    ${PODS_ROOT}/ReleaseVendoredFramework/ios/SomeFramework.framework
    ${PODS_ROOT}/Target\ Support\ Files/Pods/Pods-frameworks.sh
  )
  phase.output_paths.sort.should == %w(
    ${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}/CompiledFramework.framework
    ${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}/SomeFramework.framework
  )
end
# --- Legacy phase cleanup and "[CP-User]" custom script phases -------------
it 'removes script phases that have been removed from CocoaPods' do
  @target_integrator.integrate!
  target = @target_integrator.send(:native_targets).first
  # Inject each retired phase name to simulate a project integrated by an
  # older CocoaPods version.
  TargetIntegrator::REMOVED_SCRIPT_PHASE_NAMES.each do |name|
    phase = target.shell_script_build_phases.find { |bp| bp.name.end_with?(name) }
    phase.should.be.nil?
    phase = target.new_shell_script_build_phase("#{TargetIntegrator::BUILD_PHASE_PREFIX}#{name}")
    phase.should.not.be.nil?
  end
  # Re-integrate and ensure the phases are now removed
  target.new_shell_script_build_phase('[CP] Prepare Artifacts')
  @target_integrator.integrate!
  TargetIntegrator::REMOVED_SCRIPT_PHASE_NAMES.each do |name|
    phase = target.shell_script_build_phases.find { |bp| bp.name.end_with?(name) }
    phase.should.be.nil?
  end
end
it 'adds a custom shell script phase' do
  @pod_bundle.target_definition.stubs(:script_phases).returns([:name => 'Custom Script', :script => 'echo "Hello World"'])
  @target_integrator.integrate!
  target = @target_integrator.send(:native_targets).first
  phase = target.shell_script_build_phases.find { |bp| bp.name == @user_script_phase_name }
  phase.name.should == '[CP-User] Custom Script'
  phase.shell_script.should == 'echo "Hello World"'
  phase.shell_path.should == '/bin/sh'
  phase.input_paths.should.be.nil
  phase.output_paths.should.be.nil
  phase.input_file_list_paths.should.be.nil
  phase.output_file_list_paths.should.be.nil
  phase.show_env_vars_in_log.should.be.nil
  phase.dependency_file.should.be.nil
end
# Verifies that user-declared script phases carry their input/output file
# and file-list paths through to the generated PBXShellScriptBuildPhase.
it 'adds a custom shell script phase with input/output paths' do
  @pod_bundle.target_definition.stubs(:script_phases).returns([:name => 'Custom Script',
                                                               :script => 'echo "Hello World"',
                                                               :input_files => ['/path/to/input_file.txt'],
                                                               :output_files => ['/path/to/output_file.txt'],
                                                               :input_file_lists => ['/path/to/input_file.xcfilelist'],
                                                               :output_file_lists => ['/path/to/output_file.xcfilelist']])
  @target_integrator.integrate!
  target = @target_integrator.send(:native_targets).first
  phase = target.shell_script_build_phases.find { |bp| bp.name == @user_script_phase_name }
  phase.name.should == '[CP-User] Custom Script'
  phase.shell_script.should == 'echo "Hello World"'
  phase.shell_path.should == '/bin/sh'
  phase.input_paths.should == ['/path/to/input_file.txt']
  phase.output_paths.should == ['/path/to/output_file.txt']
  # Fix: this line previously read `phase.input_file_list_paths == [...]`
  # without `.should`, so the comparison result was discarded and the
  # expectation was never actually asserted.
  phase.input_file_list_paths.should == ['/path/to/input_file.xcfilelist']
  phase.output_file_list_paths.should == ['/path/to/output_file.xcfilelist']
  phase.show_env_vars_in_log.should.be.nil
  phase.dependency_file.should.be.nil
end
# --- Custom script phase options: dependency file, env-var logging, cleanup
it 'adds a custom shell script phase with dependency file' do
  @pod_bundle.target_definition.stubs(:script_phases).returns([:name => 'Custom Script',
                                                               :script => 'echo "Hello World"',
                                                               :dependency_file => '/path/to/depfile.d'])
  @target_integrator.integrate!
  target = @target_integrator.send(:native_targets).first
  phase = target.shell_script_build_phases.find { |bp| bp.name == @user_script_phase_name }
  phase.should.not.be.nil?
  phase.name.should == '[CP-User] Custom Script'
  phase.shell_script.should == 'echo "Hello World"'
  phase.shell_path.should == '/bin/sh'
  phase.input_paths.should.be.nil
  phase.output_paths.should.be.nil
  phase.input_file_list_paths.should.be.nil
  phase.output_file_list_paths.should.be.nil
  phase.show_env_vars_in_log.should.be.nil
  phase.dependency_file.should == '/path/to/depfile.d'
end
it 'sets the show_env_vars_in_log value to 0 if its explicitly set' do
  @pod_bundle.target_definition.stubs(:script_phases).returns([:name => 'Custom Script',
                                                               :script => 'echo "Hello World"',
                                                               :show_env_vars_in_log => '0'])
  @target_integrator.integrate!
  target = @target_integrator.send(:native_targets).first
  phase = target.shell_script_build_phases.find { |bp| bp.name == @user_script_phase_name }
  phase.should.not.be.nil?
  phase.show_env_vars_in_log.should == '0'
end
it 'does not set the show_env_vars_in_log value to 1 even if its set' do
  # Since Xcode 10 this value never gets transcribed into the `.pbxproj` file which causes Xcode 10 to _remove_
  # it if it's been added and causing a dirty file in git repos.
  @pod_bundle.target_definition.stubs(:script_phases).returns([:name => 'Custom Script',
                                                               :script => 'echo "Hello World"',
                                                               :show_env_vars_in_log => '1'])
  @target_integrator.integrate!
  target = @target_integrator.send(:native_targets).first
  phase = target.shell_script_build_phases.find { |bp| bp.name == @user_script_phase_name }
  # Even though the user has set this to '1' we expect this to be `nil`.
  phase.show_env_vars_in_log.should.be.nil
end
it 'removes outdated custom shell script phases' do
  @pod_bundle.target_definition.stubs(:script_phases).returns([:name => 'Custom Script', :script => 'echo "Hello World"'])
  @target_integrator.integrate!
  target = @target_integrator.send(:native_targets).first
  target.shell_script_build_phases.find { |bp| bp.name == @user_script_phase_name }.should.not.be.nil
  @pod_bundle.target_definition.stubs(:script_phases).returns([])
  @target_integrator.integrate!
  target.shell_script_build_phases.find { |bp| bp.name == @user_script_phase_name }.should.be.nil
end
it 'moves custom shell scripts according to their execution position' do
  @pod_bundle.target_definition.stubs(:script_phases).returns([])
  @target_integrator.integrate!
  target = @target_integrator.send(:native_targets).first
  # By calling this, xcodeproj automatically adds this phase to the native target.
  target.headers_build_phase
  # Baseline ordering with no user script phases.
  target.build_phases.map(&:display_name).should == [
    '[CP] Check Pods Manifest.lock',
    'Sources',
    'Frameworks',
    'Resources',
    '[CP] Embed Pods Frameworks',
    '[CP] Copy Pods Resources',
    'Headers',
  ]
  # :before_compile and :before_headers anchor a phase before the respective
  # built-in phase; no execution_position appends at the end.
  shell_script_one = { :name => 'Custom Script', :script => 'echo "Hello World"', :execution_position => :before_compile }
  shell_script_two = { :name => 'Custom Script 2', :script => 'echo "Hello Aliens"' }
  shell_script_three = { :name => 'Custom Script 3', :script => 'echo "Hello Aliens"', :execution_position => :before_headers }
  @pod_bundle.target_definition.stubs(:script_phases).returns([shell_script_one, shell_script_two, shell_script_three])
  @target_integrator.integrate!
  target.build_phases.map(&:display_name).should == [
    '[CP] Check Pods Manifest.lock',
    '[CP-User] Custom Script',
    'Sources',
    'Frameworks',
    'Resources',
    '[CP] Embed Pods Frameworks',
    '[CP] Copy Pods Resources',
    '[CP-User] Custom Script 3',
    'Headers',
    '[CP-User] Custom Script 2',
  ]
  # Changing the positions on a subsequent integration moves the phases.
  shell_script_one = { :name => 'Custom Script', :script => 'echo "Hello World"', :execution_position => :after_compile }
  shell_script_two = { :name => 'Custom Script 2', :script => 'echo "Hello Aliens"', :execution_position => :before_compile }
  shell_script_three = { :name => 'Custom Script 3', :script => 'echo "Hello Aliens"', :execution_position => :after_headers }
  @pod_bundle.target_definition.stubs(:script_phases).returns([shell_script_one, shell_script_two, shell_script_three])
  @target_integrator.integrate!
  target.build_phases.map(&:display_name).should == [
    '[CP] Check Pods Manifest.lock',
    '[CP-User] Custom Script 2',
    'Sources',
    '[CP-User] Custom Script',
    'Frameworks',
    'Resources',
    '[CP] Embed Pods Frameworks',
    '[CP] Copy Pods Resources',
    'Headers',
    '[CP-User] Custom Script 3',
  ]
  # With no execution_position given the expectation below shows the phases
  # keep their positions from the previous integration (except the removed one).
  shell_script_one = { :name => 'Custom Script', :script => 'echo "Hello World"' }
  shell_script_two = { :name => 'Custom Script 2', :script => 'echo "Hello Aliens"' }
  @pod_bundle.target_definition.stubs(:script_phases).returns([shell_script_one, shell_script_two])
  @target_integrator.integrate!
  target.build_phases.map(&:display_name).should == [
    '[CP] Check Pods Manifest.lock',
    '[CP-User] Custom Script 2',
    'Sources',
    '[CP-User] Custom Script',
    'Frameworks',
    'Resources',
    '[CP] Embed Pods Frameworks',
    '[CP] Copy Pods Resources',
    'Headers',
  ]
end
it 'adds, removes and moves custom shell script phases' do
  shell_script_one = { :name => 'Custom Script', :script => 'echo "Hello World"' }
  shell_script_two = { :name => 'Custom Script 2', :script => 'echo "Hello Aliens"' }
  shell_script_three = { :name => 'Custom Script 3', :script => 'echo "Hello Universe"' }
  shell_script_four = { :name => 'Custom Script 4', :script => 'echo "Ran out of Hellos"' }
  @pod_bundle.target_definition.stubs(:script_phases).returns([shell_script_one, shell_script_two, shell_script_three])
  @target_integrator.integrate!
  target = @target_integrator.send(:native_targets).first
  target.build_phases.map(&:display_name).should == [
    '[CP] Check Pods Manifest.lock',
    'Sources',
    'Frameworks',
    'Resources',
    '[CP] Embed Pods Frameworks',
    '[CP] Copy Pods Resources',
    '[CP-User] Custom Script',
    '[CP-User] Custom Script 2',
    '[CP-User] Custom Script 3',
  ]
  # Re-integrating with a different set: 1 and 3 are removed, 4 is added,
  # 2 is retained.
  @pod_bundle.target_definition.stubs(:script_phases).returns([shell_script_two, shell_script_four])
  @target_integrator.integrate!
  target.build_phases.map(&:display_name).should == [
    '[CP] Check Pods Manifest.lock',
    'Sources',
    'Frameworks',
    'Resources',
    '[CP] Embed Pods Frameworks',
    '[CP] Copy Pods Resources',
    '[CP-User] Custom Script 2',
    '[CP-User] Custom Script 4',
  ]
end
it 'does not touch non cocoapods shell script phases' do
  @pod_bundle.target_definition.stubs(:script_phases).returns([:name => 'Custom Script', :script => 'echo "Hello World"'])
  target = @target_integrator.send(:native_targets).first
  # Phases added directly by the user (no [CP]/[CP-User] prefix) must survive
  # integration untouched.
  target.new_shell_script_build_phase('User Script Phase 1')
  target.new_shell_script_build_phase('User Script Phase 2')
  @target_integrator.integrate!
  target.build_phases.map(&:display_name).should == [
    '[CP] Check Pods Manifest.lock',
    'Sources',
    'Frameworks',
    'Resources',
    'User Script Phase 1',
    'User Script Phase 2',
    '[CP] Embed Pods Frameworks',
    '[CP] Copy Pods Resources',
    '[CP-User] Custom Script',
  ]
  # Removing all declared script phases only removes the CocoaPods-managed one.
  @pod_bundle.target_definition.stubs(:script_phases).returns([])
  @target_integrator.integrate!
  target.build_phases.map(&:display_name).should == [
    '[CP] Check Pods Manifest.lock',
    'Sources',
    'Frameworks',
    'Resources',
    'User Script Phase 1',
    'User Script Phase 2',
    '[CP] Embed Pods Frameworks',
    '[CP] Copy Pods Resources',
  ]
end
end
describe 'Script paths' do
  it 'calculates the output paths of the embed frameworks script' do
    paths = [
      Xcode::FrameworkPaths.new('${PODS_ROOT}/DebugVendoredFramework/ios/SomeFramework.framework',
                                '${PODS_ROOT}/DebugVendoredFramework/ios/SomeFramework.framework.dSYM'),
      Xcode::FrameworkPaths.new('${BUILT_PRODUCTS_DIR}/DebugCompiledFramework/CompiledFramework.framework'),
      Xcode::FrameworkPaths.new('${PODS_ROOT}/ReleaseVendoredFramework/ios/SomeFramework.framework',
                                '${PODS_ROOT}/ReleaseVendoredFramework/ios/SomeFramework.framework.dSYM'),
      Xcode::FrameworkPaths.new('${BUILT_PRODUCTS_DIR}/ReleaseCompiledFramework/CompiledFramework.framework'),
    ]
    xcframeworks = [
      Xcode::XCFramework.new(fixture('CoconutLib.xcframework')),
    ]
    xcframeworks[0].stubs(:build_type).returns(BuildType.dynamic_framework)
    # Per the expectation, frameworks with the same basename (Debug/Release
    # variants of SomeFramework and CompiledFramework) collapse to a single
    # output path inside the app's Frameworks folder.
    TargetIntegrator.embed_frameworks_output_paths(paths, xcframeworks).sort.should == %w(
      ${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}/CoconutLib.framework
      ${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}/CompiledFramework.framework
      ${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}/SomeFramework.framework
    )
  end

  it 'does not include static xcframeworks in the embed frameworks output paths' do
    # Static xcframeworks are linked, not embedded, so they produce no outputs.
    xcframeworks = [
      Xcode::XCFramework.new(fixture('CoconutLib.xcframework')),
    ]
    xcframeworks[0].stubs(:build_type).returns(BuildType.static_framework)
    TargetIntegrator.embed_frameworks_output_paths([], xcframeworks).should == []
  end

  it 'calculates the output paths of the copy resources script' do
    resource_paths = %w(
      ${PODS_CONFIGURATION_BUILD_DIR}/DebugLib/DebugAssets.xcassets
      ${PODS_CONFIGURATION_BUILD_DIR}/DebugLib/DebugDataModel.xcdatamodeld
      ${PODS_CONFIGURATION_BUILD_DIR}/DebugLib/DebugDataModel.xcdatamodel
      ${PODS_CONFIGURATION_BUILD_DIR}/DebugLib/DebugMappingModel.xcmappingmodel
      ${PODS_CONFIGURATION_BUILD_DIR}/DebugLib/DebugLib.bundle
      ${PODS_CONFIGURATION_BUILD_DIR}/ReleaseLib/ReleaseLib.bundle
      ${PODS_CONFIGURATION_BUILD_DIR}/ReleaseLib/ReleaseLibXIB.xib
      ${PODS_CONFIGURATION_BUILD_DIR}/ReleaseLib/ReleaseLib.storyboard
    )
    # Compilable resources map to their compiled extensions
    # (.xcassets -> Assets.car, .xcdatamodel(d) -> .mom(d), .xib -> .nib, ...).
    TargetIntegrator.resource_output_paths(resource_paths).sort.should == %w(
      ${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}/Assets.car
      ${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}/DebugDataModel.mom
      ${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}/DebugDataModel.momd
      ${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}/DebugLib.bundle
      ${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}/DebugMappingModel.cdm
      ${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}/ReleaseLib.bundle
      ${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}/ReleaseLib.storyboardc
      ${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}/ReleaseLibXIB.nib
    )
  end
end
describe 'Private helpers' do
  it 'returns the native targets associated with the Pod bundle' do
    @target_integrator.send(:native_targets).map(&:name).should == %w( SampleProject )
  end

  it 'is robust against other types of references in the build files of the frameworks build phase' do
    # A build file whose file_ref is a PBXVariantGroup (not a product
    # reference) must not break target discovery.
    build_file = @project.new(Xcodeproj::Project::PBXBuildFile)
    build_file.file_ref = @project.new(Xcodeproj::Project::PBXVariantGroup)
    @target_integrator.stubs(:user_project).returns(@project)
    @target.frameworks_build_phase.files << build_file
    @target_integrator.send(:native_targets).map(&:name).should == %w( SampleProject )
  end

  it 'is robust against build files with missing file references' do
    # A build file with a nil file_ref must be skipped, not crash.
    build_file = @project.new(Xcodeproj::Project::PBXBuildFile)
    build_file.file_ref = nil
    @target_integrator.stubs(:user_project).returns(@project)
    @target.frameworks_build_phase.files << build_file
    @target_integrator.send(:native_targets).map(&:name).should == %w( SampleProject )
  end
end
end
end
end
| 56.260817 | 156 | 0.64791 |
ac0d3bb44e069a8a1e14bf0d9a081a6545c54eb6 | 7,652 | # frozen_string_literal: true
require 'rubygems/text'
class Gem::Licenses
extend Gem::Text
NONSTANDARD = 'Nonstandard'.freeze
# Software Package Data Exchange (SPDX) standard open-source software
# license identifiers
LICENSE_IDENTIFIERS = %w(
0BSD
AAL
ADSL
AFL-1.1
AFL-1.2
AFL-2.0
AFL-2.1
AFL-3.0
AGPL-1.0
AGPL-3.0
AGPL-3.0-only
AGPL-3.0-or-later
AMDPLPA
AML
AMPAS
ANTLR-PD
APAFML
APL-1.0
APSL-1.0
APSL-1.1
APSL-1.2
APSL-2.0
Abstyles
Adobe-2006
Adobe-Glyph
Afmparse
Aladdin
Apache-1.0
Apache-1.1
Apache-2.0
Artistic-1.0
Artistic-1.0-Perl
Artistic-1.0-cl8
Artistic-2.0
BSD-1-Clause
BSD-2-Clause
BSD-2-Clause-FreeBSD
BSD-2-Clause-NetBSD
BSD-2-Clause-Patent
BSD-3-Clause
BSD-3-Clause-Attribution
BSD-3-Clause-Clear
BSD-3-Clause-LBNL
BSD-3-Clause-No-Nuclear-License
BSD-3-Clause-No-Nuclear-License-2014
BSD-3-Clause-No-Nuclear-Warranty
BSD-4-Clause
BSD-4-Clause-UC
BSD-Protection
BSD-Source-Code
BSL-1.0
Bahyph
Barr
Beerware
BitTorrent-1.0
BitTorrent-1.1
Borceux
CATOSL-1.1
CC-BY-1.0
CC-BY-2.0
CC-BY-2.5
CC-BY-3.0
CC-BY-4.0
CC-BY-NC-1.0
CC-BY-NC-2.0
CC-BY-NC-2.5
CC-BY-NC-3.0
CC-BY-NC-4.0
CC-BY-NC-ND-1.0
CC-BY-NC-ND-2.0
CC-BY-NC-ND-2.5
CC-BY-NC-ND-3.0
CC-BY-NC-ND-4.0
CC-BY-NC-SA-1.0
CC-BY-NC-SA-2.0
CC-BY-NC-SA-2.5
CC-BY-NC-SA-3.0
CC-BY-NC-SA-4.0
CC-BY-ND-1.0
CC-BY-ND-2.0
CC-BY-ND-2.5
CC-BY-ND-3.0
CC-BY-ND-4.0
CC-BY-SA-1.0
CC-BY-SA-2.0
CC-BY-SA-2.5
CC-BY-SA-3.0
CC-BY-SA-4.0
CC0-1.0
CDDL-1.0
CDDL-1.1
CDLA-Permissive-1.0
CDLA-Sharing-1.0
CECILL-1.0
CECILL-1.1
CECILL-2.0
CECILL-2.1
CECILL-B
CECILL-C
CNRI-Jython
CNRI-Python
CNRI-Python-GPL-Compatible
CPAL-1.0
CPL-1.0
CPOL-1.02
CUA-OPL-1.0
Caldera
ClArtistic
Condor-1.1
Crossword
CrystalStacker
Cube
D-FSL-1.0
DOC
DSDP
Dotseqn
ECL-1.0
ECL-2.0
EFL-1.0
EFL-2.0
EPL-1.0
EPL-2.0
EUDatagrid
EUPL-1.0
EUPL-1.1
EUPL-1.2
Entessa
ErlPL-1.1
Eurosym
FSFAP
FSFUL
FSFULLR
FTL
Fair
Frameworx-1.0
FreeImage
GFDL-1.1
GFDL-1.1-only
GFDL-1.1-or-later
GFDL-1.2
GFDL-1.2-only
GFDL-1.2-or-later
GFDL-1.3
GFDL-1.3-only
GFDL-1.3-or-later
GL2PS
GPL-1.0
GPL-1.0+
GPL-1.0-only
GPL-1.0-or-later
GPL-2.0
GPL-2.0+
GPL-2.0-only
GPL-2.0-or-later
GPL-2.0-with-GCC-exception
GPL-2.0-with-autoconf-exception
GPL-2.0-with-bison-exception
GPL-2.0-with-classpath-exception
GPL-2.0-with-font-exception
GPL-3.0
GPL-3.0+
GPL-3.0-only
GPL-3.0-or-later
GPL-3.0-with-GCC-exception
GPL-3.0-with-autoconf-exception
Giftware
Glide
Glulxe
HPND
HaskellReport
IBM-pibs
ICU
IJG
IPA
IPL-1.0
ISC
ImageMagick
Imlib2
Info-ZIP
Intel
Intel-ACPI
Interbase-1.0
JSON
JasPer-2.0
LAL-1.2
LAL-1.3
LGPL-2.0
LGPL-2.0+
LGPL-2.0-only
LGPL-2.0-or-later
LGPL-2.1
LGPL-2.1+
LGPL-2.1-only
LGPL-2.1-or-later
LGPL-3.0
LGPL-3.0+
LGPL-3.0-only
LGPL-3.0-or-later
LGPLLR
LPL-1.0
LPL-1.02
LPPL-1.0
LPPL-1.1
LPPL-1.2
LPPL-1.3a
LPPL-1.3c
Latex2e
Leptonica
LiLiQ-P-1.1
LiLiQ-R-1.1
LiLiQ-Rplus-1.1
Libpng
MIT
MIT-CMU
MIT-advertising
MIT-enna
MIT-feh
MITNFA
MPL-1.0
MPL-1.1
MPL-2.0
MPL-2.0-no-copyleft-exception
MS-PL
MS-RL
MTLL
MakeIndex
MirOS
Motosoto
Multics
Mup
NASA-1.3
NBPL-1.0
NCSA
NGPL
NLOD-1.0
NLPL
NOSL
NPL-1.0
NPL-1.1
NPOSL-3.0
NRL
NTP
Naumen
Net-SNMP
NetCDF
Newsletr
Nokia
Noweb
Nunit
OCCT-PL
OCLC-2.0
ODbL-1.0
OFL-1.0
OFL-1.1
OGTSL
OLDAP-1.1
OLDAP-1.2
OLDAP-1.3
OLDAP-1.4
OLDAP-2.0
OLDAP-2.0.1
OLDAP-2.1
OLDAP-2.2
OLDAP-2.2.1
OLDAP-2.2.2
OLDAP-2.3
OLDAP-2.4
OLDAP-2.5
OLDAP-2.6
OLDAP-2.7
OLDAP-2.8
OML
OPL-1.0
OSET-PL-2.1
OSL-1.0
OSL-1.1
OSL-2.0
OSL-2.1
OSL-3.0
OpenSSL
PDDL-1.0
PHP-3.0
PHP-3.01
Plexus
PostgreSQL
Python-2.0
QPL-1.0
Qhull
RHeCos-1.1
RPL-1.1
RPL-1.5
RPSL-1.0
RSA-MD
RSCPL
Rdisc
Ruby
SAX-PD
SCEA
SGI-B-1.0
SGI-B-1.1
SGI-B-2.0
SISSL
SISSL-1.2
SMLNJ
SMPPL
SNIA
SPL-1.0
SWL
Saxpath
Sendmail
SimPL-2.0
Sleepycat
Spencer-86
Spencer-94
Spencer-99
StandardML-NJ
SugarCRM-1.1.3
TCL
TCP-wrappers
TMate
TORQUE-1.1
TOSL
UPL-1.0
Unicode-DFS-2015
Unicode-DFS-2016
Unicode-TOU
Unlicense
VOSTROM
VSL-1.0
Vim
W3C
W3C-19980720
W3C-20150513
WTFPL
Watcom-1.0
Wsuipa
X11
XFree86-1.1
XSkat
Xerox
Xnet
YPL-1.0
YPL-1.1
ZPL-1.1
ZPL-2.0
ZPL-2.1
Zed
Zend-2.0
Zimbra-1.3
Zimbra-1.4
Zlib
bzip2-1.0.5
bzip2-1.0.6
curl
diffmark
dvipdfm
eCos-2.0
eGenix
gSOAP-1.3b
gnuplot
iMatix
libtiff
mpich2
psfrag
psutils
wxWindows
xinetd
xpp
zlib-acknowledgement
).freeze
# exception identifiers
EXCEPTION_IDENTIFIERS = %w(
389-exception
Autoconf-exception-2.0
Autoconf-exception-3.0
Bison-exception-2.2
Bootloader-exception
CLISP-exception-2.0
Classpath-exception-2.0
DigiRule-FOSS-exception
FLTK-exception
Fawkes-Runtime-exception
Font-exception-2.0
GCC-exception-2.0
GCC-exception-3.1
LZMA-exception
Libtool-exception
Linux-syscall-note
Nokia-Qt-exception-1.1
OCCT-exception-1.0
Qwt-exception-1.0
WxWindows-exception-3.1
eCos-exception-2.0
freertos-exception-2.0
gnu-javamail-exception
i2p-gpl-java-exception
mif-exception
openvpn-openssl-exception
u-boot-exception-2.0
).freeze
# Matches a valid license expression: a known SPDX license identifier,
# optionally suffixed with `+` and/or a `WITH <exception>` clause, or the
# literal Nonstandard marker. Built with /x so layout whitespace below is
# ignored, and /o so the interpolations are evaluated only once.
REGEXP = %r{
  \A
  (
    #{Regexp.union(LICENSE_IDENTIFIERS)}
    \+?
    (\s WITH \s #{Regexp.union(EXCEPTION_IDENTIFIERS)})?
    | #{NONSTANDARD}
  )
  \Z
}ox.freeze
# Whether +license+ is a valid license expression according to REGEXP.
#
# @param license [String] the license expression to validate
# @return [Boolean] true when the expression matches, false otherwise
def self.match?(license)
  REGEXP.match(license) ? true : false
end
# For an unrecognized license, return the known identifiers closest to it
# by Levenshtein distance, or nil when nothing is close enough (a candidate
# only counts when its distance is smaller than the input's own length).
#
# @param license [String] the (presumably misspelled) license identifier
# @return [Array<String>, nil] the closest identifiers, or nil
def self.suggestions(license)
  distances = LICENSE_IDENTIFIERS.group_by do |candidate|
    levenshtein_distance(candidate, license)
  end
  best = distances.keys.min
  best < license.size ? distances[best] : nil
end
end
| 17.390909 | 71 | 0.509671 |
4abcfb3162cb8597bca9ee096b38e52ebfea71c7 | 598 | require "formula"
class Fleetctl < Formula
homepage "https://github.com/coreos/fleet"
url "https://github.com/coreos/fleet/archive/v0.5.4.tar.gz"
sha1 "1fec5e4d23627446bce52eae691cb233ef03e17b"
head "https://github.com/coreos/fleet.git"
bottle do
sha1 "80417f9cb656b30100f5fb06bb4d4067a6cd3d93" => :mavericks
sha1 "a8cb08ce6ed71d44bbede0eb50260e14851d6ae2" => :mountain_lion
sha1 "a62e570c01a879c361b36a7b6b91db6fc052e9b2" => :lion
end
depends_on "go" => :build
def install
ENV["GOPATH"] = buildpath
system "./build"
bin.install "bin/fleetctl"
end
end
| 26 | 69 | 0.734114 |
082b090ea9b7b082db8c6e42569347398b3ba9a1 | 51,656 | require 'puppet'
require 'getoptlong'
require 'puppet/util/watched_file'
require 'puppet/util/command_line/puppet_option_parser'
require 'forwardable'
require 'fileutils'
# The class for handling configuration files.
class Puppet::Settings
extend Forwardable
include Enumerable
require 'puppet/settings/errors'
require 'puppet/settings/base_setting'
require 'puppet/settings/string_setting'
require 'puppet/settings/enum_setting'
require 'puppet/settings/symbolic_enum_setting'
require 'puppet/settings/array_setting'
require 'puppet/settings/file_setting'
require 'puppet/settings/directory_setting'
require 'puppet/settings/file_or_directory_setting'
require 'puppet/settings/path_setting'
require 'puppet/settings/boolean_setting'
require 'puppet/settings/terminus_setting'
require 'puppet/settings/duration_setting'
require 'puppet/settings/ttl_setting'
require 'puppet/settings/priority_setting'
require 'puppet/settings/autosign_setting'
require 'puppet/settings/config_file'
require 'puppet/settings/value_translator'
require 'puppet/settings/environment_conf'
require 'puppet/settings/server_list_setting'
require 'puppet/settings/certificate_revocation_setting'
# local reference for convenience
PuppetOptionParser = Puppet::Util::CommandLine::PuppetOptionParser

attr_accessor :files
attr_reader :timer

# These are the settings that every app is required to specify; there are
# reasonable defaults defined in application.rb.
REQUIRED_APP_SETTINGS = [:logdir, :confdir, :vardir, :codedir]

# The acceptable sections of the puppet.conf configuration file.
ALLOWED_SECTION_NAMES = ['main', 'master', 'agent', 'user'].freeze

# Sentinel used internally when no environment applies.
NONE = 'none'.freeze
# This method is intended for puppet internal use only; it is a convenience method that
# returns reasonable application default settings values for a given run_mode.
# Build the hash of default application settings (name plus the standard
# directory paths) derived from the given run mode.
#
# @param run_mode [Puppet::Util::RunMode] the mode to derive defaults from
# @return [Hash{Symbol => Object}] the application default settings
def self.app_defaults_for_run_mode(run_mode)
  {
    name: run_mode.to_s,
    run_mode: run_mode.name,
    confdir: run_mode.conf_dir,
    codedir: run_mode.code_dir,
    vardir: run_mode.var_dir,
    rundir: run_mode.run_dir,
    logdir: run_mode.log_dir,
  }
end
# Default certificate name for this node: hostname plus the domain fact
# (when a non-empty domain is available), with any trailing dot removed.
#
# @return [String] the fully qualified certname
def self.default_certname
  hostname = hostname_fact
  domain = domain_fact
  fqdn = (domain && domain != "") ? "#{hostname}.#{domain}" : hostname
  fqdn.to_s.gsub(/\.$/, '')
end
# Current hostname, as reported by Facter.
def self.hostname_fact()
  Facter.value :hostname
end

# Current DNS domain, as reported by Facter (may be nil or empty).
def self.domain_fact()
  Facter.value :domain
end
# Fallback file name of the main configuration file.
#
# @return [String] always 'puppet.conf'
def self.default_config_file_name
  'puppet.conf'
end
# Render the values of the given settings as seen from +section+, as a hash
# with string keys (and symbol values stringified) suitable for display.
#
# @param section [String, Symbol] the puppet.conf section to evaluate from
# @param settings [Array<Symbol>, :all] the settings to render, or :all
def stringify_settings(section, settings = :all)
  values_from_the_selected_section =
    values(nil, section.to_sym)

  loader_settings = {
    :environmentpath => values_from_the_selected_section.interpolate(:environmentpath),
    :basemodulepath => values_from_the_selected_section.interpolate(:basemodulepath),
  }

  Puppet.override(Puppet.base_context(loader_settings),
                  _("New environment loaders generated from the requested section.")) do
    # And now we can lookup values that include those from environments configured from
    # the requested section
    values = values(Puppet[:environment].to_sym, section.to_sym)

    to_be_rendered = {}
    settings = Puppet.settings.to_a.collect(&:first) if settings == :all

    settings.sort.each do |setting_name|
      to_be_rendered[setting_name] = values.print(setting_name.to_sym)
    end
    # Convert symbol keys/values into plain strings before returning.
    stringifyhash(to_be_rendered)
  end
end
# Return a copy of +hash+ with every key converted to a String and every
# Symbol value converted to a String; nested hashes are handled recursively.
# Other value types (arrays, numbers, ...) are passed through unchanged.
#
# @param hash [Hash] the hash to stringify
# @return [Hash] a new hash with string keys
def stringifyhash(hash)
  hash.each_with_object({}) do |(key, val), result|
    result[key.to_s] =
      case val
      when Hash then stringifyhash(val)
      when Symbol then val.to_s
      else val
      end
  end
end
# Create a new collection of config settings.
def initialize
  # Setting name (Symbol) => setting definition object.
  @config = {}
  # Presumably maps short option names to settings — populated outside this
  # chunk; TODO confirm against the setting-definition code.
  @shortnames = {}

  @created = []

  # Keep track of set values, one bucket per source of values.
  @value_sets = {
    :cli => Values.new(:cli, @config),
    :memory => Values.new(:memory, @config),
    :application_defaults => Values.new(:application_defaults, @config),
    :overridden_defaults => Values.new(:overridden_defaults, @config),
  }
  @configuration_file = nil

  # And keep a per-environment cache
  @cache = Hash.new { |hash, key| hash[key] = {} }
  @values = Hash.new { |hash, key| hash[key] = {} }

  # The list of sections we've used.
  @used = []

  # Settings whose hooks must wait until application defaults exist.
  @hooks_to_call_on_application_initialization = []
  @deprecated_setting_names = []
  @deprecated_settings_that_have_been_configured = []

  @translate = Puppet::Settings::ValueTranslator.new
  @config_file_parser = Puppet::Settings::ConfigFile.new(@translate)
end
# Retrieve a config value
# @param param [Symbol] the name of the setting
# @return [Object] the value of the setting
# @api private
# Retrieve a config value, warning when the setting is deprecated.
# @param param [Symbol] the name of the setting
# @return [Object] the value of the setting
# @api private
def [](param)
  issue_deprecation_warning(setting(param), "Accessing '#{param}' as a setting is deprecated.") if @deprecated_setting_names.include?(param)
  value(param)
end
# Set a config value. This doesn't set the defaults, it sets the value itself.
# @param param [Symbol] the name of the setting
# @param value [Object] the new value of the setting
# @api private
# Set a config value in the :memory bucket (does not touch defaults),
# warning when the setting is deprecated, then flush caches.
# @param param [Symbol] the name of the setting
# @param value [Object] the new value of the setting
# @api private
def []=(param, value)
  issue_deprecation_warning(setting(param), "Modifying '#{param}' as a setting is deprecated.") if @deprecated_setting_names.include?(param)
  @value_sets[:memory].set(param, value)
  unsafe_flush_cache
end
# Create a new default value for the given setting. The default overrides are
# higher precedence than the defaults given in defaults.rb, but lower
# precedence than any other values for the setting. This allows one setting
# `a` to change the default of setting `b`, but still allow a user to provide
# a value for setting `b`.
#
# @param param [Symbol] the name of the setting
# @param value [Object] the new default value for the setting
# @api private
def override_default(param, value)
  # Overridden defaults live in their own bucket so that cli/memory/
  # application-default values still take precedence (see the comment above).
  @value_sets[:overridden_defaults].set(param, value)
  unsafe_flush_cache
end
# Generate the list of valid arguments, in a format that GetoptLong can
# understand, and add them to the passed option list.
# Append every setting's GetoptLong-style argument declarations to
# +options+ and return it.
#
# @param options [Array] the list to append to (mutated in place)
# @return [Array] the same +options+ list
def addargs(options)
  each do |_name, setting|
    setting.getopt_args.each { |arg_spec| options << arg_spec }
  end
  options
end
# Generate the list of valid arguments, in a format that OptionParser can
# understand, and add them to the passed option list.
# Append every setting's OptionParser-style argument declaration to
# +options+ and return it.
#
# @param options [Array] the list to append to (mutated in place)
# @return [Array] the same +options+ list
def optparse_addargs(options)
  each do |_name, setting|
    options << setting.optparse_args
  end
  options
end
# Is our setting a boolean setting?
# Whether +param+ names a known setting defined as a BooleanSetting.
#
# @param param [Symbol, String] the setting name
# @return [Boolean]
def boolean?(param)
  key = param.to_sym
  @config.include?(key) && @config[key].is_a?(BooleanSetting)
end
# Remove all set values, potentially skipping cli values.
def clear
  # unsafe_clear's defaults: clears cli values too, but keeps the
  # application defaults.
  unsafe_clear
end
# Remove all set values, potentially skipping cli values.
# Remove all set values, potentially skipping cli values.
#
# @param clear_cli [Boolean] also reset values set on the command line
# @param clear_application_defaults [Boolean] also reset app defaults
def unsafe_clear(clear_cli = true, clear_application_defaults = false)
  if clear_application_defaults
    @value_sets[:application_defaults] = Values.new(:application_defaults, @config)
    @app_defaults_initialized = false
  end

  if clear_cli
    @value_sets[:cli] = Values.new(:cli, @config)

    # Only clear the 'used' values if we were explicitly asked to clear out
    # :cli values; otherwise, it may be just a config file reparse,
    # and we want to retain this cli values.
    @used = []
  end

  # :memory and :overridden_defaults are always reset.
  @value_sets[:memory] = Values.new(:memory, @config)
  @value_sets[:overridden_defaults] = Values.new(:overridden_defaults, @config)

  @deprecated_settings_that_have_been_configured.clear
  @values.clear
  @cache.clear
end
private :unsafe_clear
# Clears all cached settings for a particular environment to ensure
# that changes to environment.conf are reflected in the settings if
# the environment timeout has expired.
#
# param [String, Symbol] environment the name of environment to clear settings for
#
# @api private
# Drop all cached settings for a single environment (no-op for nil).
#
# @param environment [String, Symbol, nil] the environment to clear
def clear_environment_settings(environment)
  return if environment.nil?
  env_key = environment.to_sym
  @cache[env_key].clear
  @values[env_key] = {}
end
# Clear @cache, @used and the Environment.
#
# Whenever an object is returned by Settings, a copy is stored in @cache.
# As long as Setting attributes that determine the content of returned
# objects remain unchanged, Settings can keep returning objects from @cache
# without re-fetching or re-generating them.
#
# Whenever a Settings attribute changes, such as @values or @preferred_run_mode,
# this method must be called to clear out the caches so that updated
# objects will be returned.
def flush_cache
  unsafe_flush_cache
end

def unsafe_flush_cache
  clearused
end
private :unsafe_flush_cache

# Drop the per-environment object cache and the record of used sections.
def clearused
  @cache.clear
  @used = []
end
# Whether initialize_global_settings has already completed.
def global_defaults_initialized?()
  @global_defaults_initialized
end

# Bootstrap the global settings from ARGV and the config files; may only
# be called once per process.
#
# @param args [Array<String>] command-line arguments to scan
# @param require_config [Boolean] raise when the config file cannot be read
def initialize_global_settings(args = [], require_config = true)
  raise Puppet::DevError, _("Attempting to initialize global default settings more than once!") if global_defaults_initialized?

  # The first two phases of the lifecycle of a puppet application are:
  # 1) Parse the command line options and handle any of them that are
  #    registered, defined "global" puppet settings (mostly from defaults.rb).
  # 2) Parse the puppet config file(s).
  parse_global_options(args)
  parse_config_files(require_config)

  @global_defaults_initialized = true
end
# This method is called during application bootstrapping. It is responsible for parsing all of the
# command line options and initializing the settings accordingly.
#
# It will ignore options that are not defined in the global puppet settings list, because they may
# be valid options for the specific application that we are about to launch... however, at this point
# in the bootstrapping lifecycle, we don't yet know what that application is.
def parse_global_options(args)
  # Create an option parser
  option_parser = PuppetOptionParser.new
  # Unknown options may belong to the not-yet-selected application; skip them.
  option_parser.ignore_invalid_options = true

  # Add all of the settings as valid options.
  self.optparse_addargs([]).each do |option|
    option_parser.on(*option) do |arg|
      opt, val = Puppet::Settings.clean_opt(option[0], arg)
      handlearg(opt, val)
    end
  end

  option_parser.on('--run_mode',
                   "The effective 'run mode' of the application: master, agent, or user.",
                   :REQUIRED) do |arg|
    Puppet.settings.preferred_run_mode = arg
  end

  option_parser.parse(args)

  # remove run_mode options from the arguments so that later parses don't think
  # it is an unknown option.
  while option_index = args.index('--run_mode') do #rubocop:disable Lint/AssignmentInCondition
    # Delete both the flag and the value that follows it.
    args.delete_at option_index
    args.delete_at option_index
  end
  args.reject! { |arg| arg.start_with? '--run_mode=' }
end
private :parse_global_options
# A utility method (public, is used by application.rb and perhaps elsewhere) that munges a command-line
# option string into the format that Puppet.settings expects. (This mostly has to deal with handling the
# "no-" prefix on flag/boolean options).
#
# @param [String] opt the command line option that we are munging
# @param [String, TrueClass, FalseClass] val the value for the setting (as determined by the OptionParser)
def self.clean_opt(opt, val)
  # --[no-]option given with a falsey value means the negated form was used.
  opt = opt.gsub(/\[no-\]/, 'no-') if opt =~ /\[no-\]/ && !val
  # Drop any remaining [no-] marker so downstream code sees a plain option.
  [opt.gsub(/\[no-\]/, ''), val]
end
# Whether initialize_app_defaults has already completed.
def app_defaults_initialized?
  @app_defaults_initialized
end
# Install the application-level default values (paths, run mode, ...),
# fire deferred hooks and create the parent directories of the required
# path settings.
#
# @param app_defaults [Hash{Symbol => Object}] must contain every key in
#   REQUIRED_APP_SETTINGS
# @raise [SettingsError] when a required app default is missing
def initialize_app_defaults(app_defaults)
  REQUIRED_APP_SETTINGS.each do |key|
    raise SettingsError, "missing required app default setting '#{key}'" unless app_defaults.has_key?(key)
  end

  app_defaults.each do |key, value|
    if key == :run_mode
      # run_mode is not a plain value; it routes through the dedicated setter.
      self.preferred_run_mode = value
    else
      @value_sets[:application_defaults].set(key, value)
      unsafe_flush_cache
    end
  end
  apply_metadata
  call_hooks_deferred_to_application_initialization
  issue_deprecations

  REQUIRED_APP_SETTINGS.each do |key|
    create_ancestors(Puppet[key])
  end

  @app_defaults_initialized = true
end
# Create ancestor directories.
#
# @param dir [String] absolute path for a required application default directory
# @api private
# Ensure the parent directory of +dir+ exists; the directory itself is
# intentionally not created here.
#
# @param dir [String] absolute path for a required application default directory
def create_ancestors(dir)
  parent_dir = File.dirname(dir)
  FileUtils.mkdir_p(parent_dir) unless File.exist?(parent_dir)
end
private :create_ancestors
# Run the hooks of settings that asked to be handled only once the
# application is initialized.
#
# @param options [Hash] set :ignore_interpolation_dependency_errors to
#   swallow InterpolationError instead of re-raising it
def call_hooks_deferred_to_application_initialization(options = {})
  @hooks_to_call_on_application_initialization.each do |setting|
    begin
      setting.handle(self.value(setting.name))
    rescue InterpolationError => err
      raise InterpolationError, err.message, err.backtrace unless options[:ignore_interpolation_dependency_errors]
      #swallow. We're not concerned if we can't call hooks because dependencies don't exist yet
      #we'll get another chance after application defaults are initialized
    end
  end
end
private :call_hooks_deferred_to_application_initialization
# Return a value's description.
# Return the description of the named setting, or nil when unknown.
#
# @param name [Symbol, String] the setting name
# @return [String, nil]
def description(name)
  entry = @config[name.to_sym]
  entry.nil? ? nil : entry.desc
end
def_delegators :@config, :each, :each_pair, :each_key
# Iterate over each section name.
def eachsection
  seen = []
  @config.each_value do |setting_obj|
    section = setting_obj.section
    # Each distinct section is yielded only once, in first-seen order.
    next if seen.include?(section)
    yield section
    seen << section
  end
end
# Returns a given setting by name
# @param name [Symbol] The name of the setting to fetch
# @return [Puppet::Settings::BaseSetting] The setting object
# Look up the setting definition object for +param+ (String or Symbol),
# or nil when no such setting exists.
def setting(param)
  @config[param.to_sym]
end
# Handle a command-line argument.
# Handle a command-line argument: normalize the flag/value pair and store
# it in the :cli value bucket.
def handlearg(opt, value = nil)
  @cache.clear

  # OptionParser hands booleans through as TrueClass/FalseClass; normalize
  # to strings so the translator below can process them uniformly.
  if value.is_a?(FalseClass)
    value = "false"
  elsif value.is_a?(TrueClass)
    value = "true"
  end

  value &&= @translate[value]
  str = opt.sub(/^--/,'')

  # --no-foo selects setting foo with a false value.
  bool = true
  newstr = str.sub(/^no-/, '')
  if newstr != str
    str = newstr
    bool = false
  end

  str = str.intern

  # A bare boolean flag (no explicit value) takes the polarity of the flag.
  if @config[str].is_a?(Puppet::Settings::BooleanSetting)
    if value == "" or value.nil?
      value = bool
    end
  end

  s = @config[str]
  if s
    @deprecated_settings_that_have_been_configured << s if s.completely_deprecated?
  end

  @value_sets[:cli].set(str, value)
  unsafe_flush_cache
end
# Whether a setting with the given name (String or Symbol) is defined.
def include?(name)
  @config.include?(name.is_a?(String) ? name.intern : name)
end
# Prints the contents of a config file with the available config settings, or it
# prints a single value of a config setting.
# Prints the contents of a config file with the available config settings, or it
# prints a single value of a config setting.
#
# @return [Boolean] false when an unknown setting name was requested
def print_config_options
  if Puppet::Util::Log.sendlevel?(:info)
    Puppet::Util::Log.newdestination(:console)
    message = (_("Using --configprint is deprecated. Use 'puppet config <subcommand>' instead."))
    Puppet.deprecation_warning(message)
  end

  env = value(:environment)
  val = value(:configprint)
  if val == "all"
    hash = {}
    each do |name, obj|
      val = value(name,env)
      # Show empty strings as quoted "" so they remain visible in the listing.
      val = val.inspect if val == ""
      hash[name] = val
    end
    hash.sort { |a,b| a[0].to_s <=> b[0].to_s }.each do |name, v|
      puts "#{name} = #{v}"
    end
  else
    # A comma-separated list of setting names was requested.
    val.split(/\s*,\s*/).sort.each do |v|
      if include?(v)
        #if there is only one value, just print it for back compatibility
        if v == val
          puts value(val,env)
          break
        end
        puts "#{v} = #{value(v,env)}"
      else
        puts "invalid setting: #{v}"
        return false
      end
    end
  end
  true
end
# Print the puppet.conf-style rendering of the settings; always true.
def generate_config
  puts to_config
  true
end

# Print the manifest-style rendering of the settings; always true.
def generate_manifest
  puts to_manifest
  true
end
# Dispatch to whichever of --configprint / --genconfig / --genmanifest
# was requested (in that order of precedence).
def print_configs
  return print_config_options if value(:configprint) != ""
  return generate_config if value(:genconfig)
  generate_manifest if value(:genmanifest)
end

# Whether any of the config-printing options was requested.
def print_configs?
  (value(:configprint) != "" || value(:genconfig) || value(:genmanifest)) && true
end
# The currently configured run mode that is preferred for constructing the application configuration.
# The currently configured run mode that is preferred for constructing the application configuration.
def preferred_run_mode
  # Defaults to :user until preferred_run_mode= has been called.
  @preferred_run_mode_name || :user
end
# PRIVATE! This only exists because we need a hook to validate the run mode when it's being set, and
# it should never, ever, ever, ever be called from outside of this file.
# This method is also called when --run_mode MODE is used on the command line to set the default
#
# @param mode [String|Symbol] the name of the mode to have in effect
# @api private
def preferred_run_mode=(mode)
  normalized = mode.to_s.downcase.intern
  unless [:master, :agent, :user].include?(normalized)
    raise ValidationError, "Invalid run mode '#{normalized}'"
  end
  @preferred_run_mode_name = normalized

  # Changing the run mode has far-reaching consequences. Flush any cached
  # settings so they will be re-generated.
  flush_cache
  normalized
end
# Parse puppet.conf-formatted +text+ and load the resulting values,
# firing (or deferring) setting hooks as appropriate. Parse failures are
# logged and leave the current state untouched.
def parse_config(text, file = "text")
  begin
    data = @config_file_parser.parse_file(file, text, ALLOWED_SECTION_NAMES)
  rescue => detail
    Puppet.log_exception(detail, "Could not parse #{file}: #{detail}")
    return
  end

  # If we get here and don't have any data, we just return and don't muck with the current state of the world.
  return if data.nil?

  # If we get here then we have some data, so we need to clear out any
  # previous settings that may have come from config files.
  unsafe_clear(false, false)

  # Screen settings which have been deprecated and removed from puppet.conf
  # but are still valid on the command line and/or in environment.conf
  screen_non_puppet_conf_settings(data)

  # Make note of deprecated settings we will warn about later in initialization
  record_deprecations_from_puppet_conf(data)

  # And now we can repopulate with the values from our last parsing of the config files.
  @configuration_file = data

  # Determine our environment, if we have one.
  if @config[:environment]
    env = self.value(:environment).to_sym
  else
    env = NONE
  end

  # Call any hooks we should be calling.
  value_sets = value_sets_for(env, preferred_run_mode)
  @config.values.select(&:has_hook?).each do |setting|
    value_sets.each do |source|
      if source.include?(setting.name)
        # We still have to use value to retrieve the value, since
        # we want the fully interpolated value, not $vardir/lib or whatever.
        # This results in extra work, but so few of the settings
        # will have associated hooks that it ends up being less work this
        # way overall.
        if setting.call_hook_on_initialize?
          @hooks_to_call_on_application_initialization |= [ setting ]
        else
          setting.handle(ChainedValues.new(
            preferred_run_mode,
            env,
            value_sets,
            @config).interpolate(setting.name))
        end
        # Only the highest-precedence source that has the setting fires.
        break
      end
    end
  end

  call_hooks_deferred_to_application_initialization :ignore_interpolation_dependency_errors => true
  apply_metadata
end
# Parse the configuration file. Just provides thread safety.
#
# @param require_config [Boolean] when true, a failure to read the config
#   file is fatal (logged and re-raised); when false it is only logged.
# @return [void] returns early if the chosen config file does not exist
def parse_config_files(require_config = true)
file = which_configuration_file
if Puppet::FileSystem.exist?(file)
begin
text = read_file(file)
rescue => detail
message = _("Could not load %{file}: %{detail}") % { file: file, detail: detail}
if require_config
Puppet.log_and_raise(detail, message)
else
Puppet.log_exception(detail, message)
return
end
end
else
return
end
parse_config(text, file)
end
private :parse_config_files
# Path of the system-wide configuration file: the explicit :config setting
# when one was given, otherwise puppet.conf under the master run mode's confdir.
def main_config_file
if explicit_config_file?
return self[:config]
else
return File.join(Puppet::Util::RunMode[:master].conf_dir, config_file_name)
end
end
private :main_config_file
# Path of the per-user configuration file (puppet.conf under the user
# run mode's confdir).
def user_config_file
return File.join(Puppet::Util::RunMode[:user].conf_dir, config_file_name)
end
private :user_config_file
# This method is here to get around some life-cycle issues.  We need to be
# able to determine the config file name before the settings / defaults are
# fully loaded.  However, we also need to respect any overrides of this value
# that the user may have specified on the command line.
#
# The easiest way to do this is to attempt to read the setting, and if we
# catch an error (meaning that it hasn't been set yet), we'll fall back to
# the default value.
#
# @return [String] the basename of the configuration file
def config_file_name
begin
return self[:config_file_name] if self[:config_file_name]
rescue SettingsError
# This just means that the setting wasn't explicitly set on the command line, so we will ignore it and
# fall through to the default name.
end
return self.class.default_config_file_name
end
private :config_file_name
# Push metadata (owner/group/mode attributes) from parsed config-file
# sections onto the corresponding setting objects.
def apply_metadata
# We have to do it in the reverse of the search path,
# because multiple sections could set the same value
# and I'm too lazy to only set the metadata once.
if @configuration_file
searchpath(nil, preferred_run_mode).reverse_each do |source|
section = @configuration_file.sections[source.name] if source.type == :section
if section
apply_metadata_from_section(section)
end
end
end
end
private :apply_metadata
# Apply metadata from a single parsed section to the matching settings.
# Only settings that actually carry metadata in the section are touched.
def apply_metadata_from_section(section)
section.settings.each do |setting|
type = @config[setting.name] if setting.has_metadata?
if type
type.set_meta(setting.meta)
end
end
end
# Maps the :type option accepted by #newsetting / #define_settings to the
# Setting subclass that implements it.
# Frozen to prevent accidental runtime mutation (consistent with the
# frozen DEPRECATION_REFS constant below).
SETTING_TYPES = {
:string => StringSetting,
:file => FileSetting,
:directory => DirectorySetting,
:file_or_directory => FileOrDirectorySetting,
:path => PathSetting,
:boolean => BooleanSetting,
:terminus => TerminusSetting,
:duration => DurationSetting,
:ttl => TTLSetting,
:array => ArraySetting,
:enum => EnumSetting,
:symbolic_enum => SymbolicEnumSetting,
:priority => PrioritySetting,
:autosign => AutosignSetting,
:server_list => ServerListSetting,
:certificate_revocation => CertificateRevocationSetting
}.freeze
# Create a new setting. The value is passed in because it's used to determine
# what kind of setting we're creating, but the value itself might be either
# a default or a value, so we can't actually assign it.
#
# See #define_settings for documentation on the legal values for the ":type" option.
#
# @param hash [Hash] setting definition options (:name, :section, :type, ...)
# @return [Puppet::Settings::BaseSetting] the constructed setting object
# @raise [ArgumentError] when :type names an unknown setting type
def newsetting(hash)
klass = nil
hash[:section] = hash[:section].to_sym if hash[:section]
type = hash[:type]
if type
klass = SETTING_TYPES[type]
unless klass
raise ArgumentError, _("Invalid setting type '%{type}'") % { type: type }
end
hash.delete(:type)
else
# The only implicit typing we still do for settings is to fall back to "String" type if they didn't explicitly
# specify a type. Personally I'd like to get rid of this too, and make the "type" option mandatory... but
# there was a little resistance to taking things quite that far for now. --cprice 2012-03-19
klass = StringSetting
end
hash[:settings] = self
setting = klass.new(hash)
setting
end
# This has to be private, because it doesn't add the settings to @config
private :newsetting
# Yield each defined setting object that belongs to the given section.
#
# @param section [String, Symbol] the section whose settings to iterate
def persection(section)
wanted = section.to_sym
each do |_name, setting|
yield setting if setting.section == wanted
end
end
# Reparse our config files if any watched file has changed on disk, then
# re-apply (#reuse) the previously used sections so resources pick up the
# new values.
def reparse_config_files
return unless files
filename = any_files_changed?
return unless filename
# Fix: the notice previously printed the literal text "#(unknown)" because
# the interpolation was corrupted; report the file that actually changed.
Puppet.notice "Config file #{filename} changed; triggering re-parse of all config files."
parse_config_files
reuse
end
# Lazily build and memoize the list of watched configuration files.
# Only the main and user config files that currently exist are watched.
#
# @return [Array<Puppet::Util::WatchedFile>]
def files
@files ||= [main_config_file, user_config_file].select { |path|
Puppet::FileSystem.exist?(path)
}.map { |path| Puppet::Util::WatchedFile.new(path) }
end
private :files
# Checks to see if any of the config files have been modified.
#
# @return [String, nil] the filename of the first watched file found to
#   have changed, or nil if no files have changed
def any_files_changed?
changed = files.find { |watched| watched.changed? }
changed && changed.to_str
end
private :any_files_changed?
# Re-activate (via #use) every section that had previously been used, so
# managed resources reflect freshly parsed settings values.
def reuse
return unless defined?(@used)
new = @used
@used = []
self.use(*new)
end
# Lightweight (name, type) pair describing one element of the settings search path.
class SearchPathElement < Struct.new(:name, :type); end
# The order in which to search for values, without defaults.
#
# @param environment [String,Symbol] symbolic reference to an environment name
# @param run_mode [Symbol] symbolic reference to a Puppet run mode
# @return [Array<SearchPathElement>]
# @api private
def configsearchpath(environment = nil, run_mode = preferred_run_mode)
searchpath = [
SearchPathElement.new(:memory, :values),
SearchPathElement.new(:cli, :values),
]
searchpath << SearchPathElement.new(environment.intern, :environment) if environment
searchpath << SearchPathElement.new(run_mode, :section) if run_mode
# Array#<< returns the receiver, so this final append is also the return value.
searchpath << SearchPathElement.new(:main, :section)
end
# The order in which to search for values.
#
# @param environment [String,Symbol] symbolic reference to an environment name
# @param run_mode [Symbol] symbolic reference to a Puppet run mode
# @return [Array<SearchPathElement>]
# @api private
def searchpath(environment = nil, run_mode = preferred_run_mode)
searchpath = configsearchpath(environment, run_mode)
searchpath << SearchPathElement.new(:application_defaults, :values)
# Final append doubles as the return value (Array#<< returns the receiver).
searchpath << SearchPathElement.new(:overridden_defaults, :values)
end
# True when the configured service user (the :user setting) exists on this
# system. Memoized after the first check; false when no :user is configured.
def service_user_available?
return @service_user_available if defined?(@service_user_available)
if self[:user]
user = Puppet::Type.type(:user).new :name => self[:user], :audit => :ensure
@service_user_available = user.exists?
else
@service_user_available = false
end
end
# True when the configured service group (the :group setting) exists on this
# system. Memoized after the first check; false when no :group is configured.
def service_group_available?
return @service_group_available if defined?(@service_group_available)
if self[:group]
group = Puppet::Type.type(:group).new :name => self[:group], :audit => :ensure
@service_group_available = group.exists?
else
@service_group_available = false
end
end
# Allow later inspection to determine if the setting was set on the
# command line, or through some other code path. Used for the
# `dns_alt_names` option during cert generate. --daniel 2011-10-18
#
# @return [Boolean] true when +param+ has a value in the :cli value set
def set_by_cli?(param)
param = param.to_sym
!@value_sets[:cli].lookup(param).nil?
end
# Get values from a search path entry.
#
# @param source [SearchPathElement] the path element to resolve
# @return [Object, nil] a value source responding to include?/lookup,
#   or nil when the referenced config-file section does not exist
# @api private
def searchpath_values(source)
case source.type
when :values
@value_sets[source.name]
when :section
section = @configuration_file.sections[source.name] if @configuration_file
if section
ValuesFromSection.new(source.name, section)
end
when :environment
ValuesFromEnvironmentConf.new(source.name)
else
raise Puppet::DevError, _("Unknown searchpath case: %{source_type} for the %{source} settings path element.") % { source_type: source.type, source: source}
end
end
# Allow later inspection to determine if the setting was set by user
# config, rather than a default setting.
#
# @return [Boolean] true when any non-default value source defines +param+
def set_by_config?(param, environment = nil, run_mode = preferred_run_mode)
param = param.to_sym
configsearchpath(environment, run_mode).any? do |source|
vals = searchpath_values(source)
if vals
vals.lookup(param)
end
end
end
# Patches the value for a param in a section.
# This method is required to support the use case of unifying --dns-alt-names and
# --dns_alt_names in the certificate face. Ideally this should be cleaned up.
# See PUP-3684 for more information.
# For regular use of setting a value, the method `[]=` should be used.
# @api private
#
def patch_value(param, value, type)
if @value_sets[type]
@value_sets[type].set(param, value)
unsafe_flush_cache
end
end
# Define a group of settings.
#
# @param [Symbol] section a symbol to use for grouping multiple settings together into a conceptual unit.  This value
# (and the conceptual separation) is not used very often; the main place where it will have a potential impact
# is when code calls Settings#use method.  See docs on that method for further details, but basically that method
# just attempts to do any preparation that may be necessary before code attempts to leverage the value of a particular
# setting.  This has the most impact for file/directory settings, where #use will attempt to "ensure" those
# files / directories.
# @param [Hash[Hash]] defs the settings to be defined.  This argument is a hash of hashes; each key should be a symbol,
# which is basically the name of the setting that you are defining.  The value should be another hash that specifies
# the parameters for the particular setting.  Legal values include:
# [:default] => not required; this is the value for the setting if no other value is specified (via cli, config file, etc.)
# For string settings this may include "variables", demarcated with $ or ${} which will be interpolated with values of other settings.
# The default value may also be a Proc that will be called only once to evaluate the default when the setting's value is retrieved.
# [:desc] => required; a description of the setting, used in documentation / help generation
# [:type] => not required, but highly encouraged!  This specifies the data type that the setting represents.  If
# you do not specify it, it will default to "string".  Legal values include:
# :string - A generic string setting
# :boolean - A boolean setting; values are expected to be "true" or "false"
# :file - A (single) file path; puppet may attempt to create this file depending on how the settings are used.  This type
# also supports additional options such as "mode", "owner", "group"
# :directory - A (single) directory path; puppet may attempt to create this file depending on how the settings are used.  This type
# also supports additional options such as "mode", "owner", "group"
# :path - This is intended to be used for settings whose value can contain multiple directory paths, represented
# as strings separated by the system path separator (e.g. system path, module path, etc.).
# [:mode] => an (optional) octal value to be used as the permissions/mode for :file and :directory settings
# [:owner] => optional owner username/uid for :file and :directory settings
# [:group] => optional group name/gid for :file and :directory settings
#
# @raise [ArgumentError] if a definition is not a Hash, a setting name is
#   already defined, or a short name collides with another setting's
def define_settings(section, defs)
section = section.to_sym
call = []
defs.each do |name, hash|
raise ArgumentError, _("setting definition for '%{name}' is not a hash!") % { name: name } unless hash.is_a? Hash
name = name.to_sym
hash[:name] = name
hash[:section] = section
raise ArgumentError, _("Setting %{name} is already defined") % { name: name } if @config.include?(name)
tryconfig = newsetting(hash)
short = tryconfig.short
if short
other = @shortnames[short]
if other
raise ArgumentError, _("Setting %{name} is already using short name '%{short}'") % { name: other.name, short: short }
end
@shortnames[short] = tryconfig
end
@config[name] = tryconfig
# Collect the settings that need to have their hooks called immediately.
# We have to collect them so that we can be sure we're fully initialized before
# the hook is called.
if tryconfig.has_hook?
if tryconfig.call_hook_on_define?
call << tryconfig
elsif tryconfig.call_hook_on_initialize?
@hooks_to_call_on_application_initialization |= [ tryconfig ]
end
end
@deprecated_setting_names << name if tryconfig.deprecated?
end
# Now that all definitions are registered, fire the on-define hooks.
call.each do |setting|
setting.handle(self.value(setting.name))
end
end
# Convert the settings we manage into a catalog full of resources that model those settings.
#
# @param sections [Array<Symbol>] restrict to these sections; empty means all
# @return [Puppet::Resource::Catalog]
def to_catalog(*sections)
sections = nil if sections.empty?
catalog = Puppet::Resource::Catalog.new("Settings", Puppet::Node::Environment::NONE)
@config.keys.find_all { |key| @config[key].is_a?(FileSetting) }.each do |key|
file = @config[key]
next if file.value.nil?
next unless (sections.nil? or sections.include?(file.section))
resource = file.to_resource
next unless resource
# Skip duplicates: two settings may resolve to the same file resource.
next if catalog.resource(resource.ref)
Puppet.debug {"Using settings: adding file resource '#{key}': '#{resource.inspect}'"}
catalog.add_resource(resource)
end
add_user_resources(catalog, sections)
add_environment_resources(catalog, sections)
catalog
end
# Convert our list of config settings into a configuration file.
#
# @return [String] a puppet.conf-style rendering with a commented header,
#   a section heading for the current run mode, and one entry per setting
def to_config
str = %{The configuration file for #{Puppet.run_mode.name}.  Note that this file
is likely to have unused settings in it; any setting that's
valid anywhere in Puppet can be in any config file, even if it's not used.
Every section can specify three special parameters: owner, group, and mode.
These parameters affect the required permissions of any files specified after
their specification.  Puppet will sometimes use these parameters to check its
own configured state, so they can be used to make Puppet a bit more self-managing.
The file format supports octothorpe-commented lines, but not partial-line comments.
Generated on #{Time.now}.
}.gsub(/^/, "# ")
# Add a section heading that matches our name.
str += "[#{preferred_run_mode}]\n"
eachsection do |section|
persection(section) do |obj|
str += obj.to_config + "\n" unless obj.name == :genconfig
end
end
return str
end
# Render the settings catalog as a parseable Puppet manifest string,
# one resource manifest per entry, separated by blank lines.
def to_manifest
catalog = to_catalog
manifests = catalog.resource_refs.map do |ref|
catalog.resource(ref).to_manifest
end
manifests.join("\n\n")
end
# Create the necessary objects to use a section.  This is idempotent;
# you can 'use' a section as many times as you want.
# Applies the settings catalog for the given sections and records them in
# @used so repeated calls are no-ops.
#
# @raise [RuntimeError] if applying the catalog produced resource failures
def use(*sections)
sections = sections.collect { |s| s.to_sym }
sections = sections.reject { |s| @used.include?(s) }
return if sections.empty?
Puppet.debug("Applying settings catalog for sections #{sections.join(', ')}")
begin
catalog = to_catalog(*sections).to_ral
rescue => detail
Puppet.log_and_raise(detail, "Could not create resources for managing Puppet's files and directories in sections #{sections.inspect}: #{detail}")
end
catalog.host_config = false
catalog.apply do |transaction|
if transaction.any_failed?
report = transaction.report
status_failures = report.resource_statuses.values.select { |r| r.failed? }
status_fail_msg = status_failures.
collect(&:events).
flatten.
select { |event| event.status == 'failure' }.
collect { |event| "#{event.resource}: #{event.message}" }.join("; ")
raise "Got #{status_failures.length} failure(s) while initializing: #{status_fail_msg}"
end
end
sections.each { |s| @used << s }
@used.uniq!
end
# @return [Boolean] true when +param+ names a defined setting
def valid?(param)
@config.key?(param.to_sym)
end
# Retrieve an object that can be used for looking up values of configuration
# settings.
#
# @param environment [Symbol] The name of the environment in which to lookup
# @param section [Symbol] The name of the configuration section in which to lookup
# @return [Puppet::Settings::ChainedValues] An object to perform lookups
# @api public
def values(environment, section)
# Memoized per (environment, section) pair.
@values[environment][section] ||= ChainedValues.new(
section,
environment,
value_sets_for(environment, section),
@config)
end
# Find the correct value using our search path.
#
# @param param [String, Symbol] The value to look up
# @param environment [String, Symbol] The environment to check for the value
# @param bypass_interpolation [true, false] Whether to skip interpolation
#
# @return [Object] The looked up value
#
# @raise [InterpolationError]
def value(param, environment = nil, bypass_interpolation = false)
environment &&= environment.to_sym
value_sym(param.to_sym, environment, bypass_interpolation)
end
# Find the correct value using symbols and our search path.
#
# @param param [Symbol] The value to look up
# @param environment [Symbol] The environment to check for the value
# @param bypass_interpolation [true, false] Whether to skip interpolation
#
# @return [Object] The looked up value
#
# @raise [InterpolationError]
def value_sym(param, environment = nil, bypass_interpolation = false)
# Check the cache first. It needs to be a per-environment
# cache so that we don't spread values from one env
# to another.
# NONE keys the cache bucket used when no environment is given.
cached_env = @cache[environment || NONE]
# Avoid two lookups in cache_env unless val is nil. When it is, it's important
# to check if the key is included so that further processing (that will result
# in nil again) is avoided.
val = cached_env[param]
return val if !val.nil? || cached_env.include?(param)
# Short circuit to nil for undefined settings.
return nil unless @config.include?(param)
vals = values(environment, preferred_run_mode)
val = bypass_interpolation ? vals.lookup(param) : vals.interpolate(param)
cached_env[param] = val
val
end
##
# (#15337) All of the logic to determine the configuration file to use
#   should be centralized into this method.  The simplified approach is:
#
# 1. If there is an explicit configuration file, use that.  (--confdir or
#    --config)
# 2. If we're running as a root process, use the system puppet.conf
#    (usually /etc/puppetlabs/puppet/puppet.conf)
# 3. Otherwise, use the user puppet.conf (usually ~/.puppetlabs/etc/puppet/puppet.conf)
#
# @return [String] absolute path of the configuration file to load
# @api private
# @todo this code duplicates {Puppet::Util::RunMode#which_dir} as described
#   in {https://projects.puppetlabs.com/issues/16637 #16637}
def which_configuration_file
if explicit_config_file? or Puppet.features.root? then
return main_config_file
else
return user_config_file
end
end
# This method just turns a file into a new ConfigFile::Conf instance
# @param file [String] absolute path to the configuration file
# @return [Puppet::Settings::ConfigFile::Conf]
# @api private
def parse_file(file, allowed_sections = [])
@config_file_parser.parse_file(file, read_file(file), allowed_sections)
end
private
DEPRECATION_REFS = {
# intentionally empty. This could be repopulated if we deprecate more settings
# and have reference links to associate with them
}.freeze
# Reject puppet.conf files that set environment.conf-only settings.
#
# @raise [SettingsError] when any forbidden setting appears in a section
def screen_non_puppet_conf_settings(puppet_conf)
puppet_conf.sections.values.each do |section|
forbidden = section.settings.select { |setting| Puppet::Settings::EnvironmentConf::ENVIRONMENT_CONF_ONLY_SETTINGS.include?(setting.name) }
raise(SettingsError, "Cannot set #{forbidden.map { |s| s.name }.join(", ")} settings in puppet.conf") if !forbidden.empty?
end
end
# Record that we want to issue a deprecation warning later in the application
# initialization cycle when we have settings bootstrapped to the point where
# we can read the Puppet[:disable_warnings] setting.
#
# We are only recording warnings applicable to settings set in puppet.conf
# itself.
def record_deprecations_from_puppet_conf(puppet_conf)
puppet_conf.sections.values.each do |section|
section.settings.each do |conf_setting|
setting = self.setting(conf_setting.name)
if setting
@deprecated_settings_that_have_been_configured << setting if setting.deprecated?
end
end
end
end
# Emit the deprecation warnings recorded by
# #record_deprecations_from_puppet_conf.
def issue_deprecations
@deprecated_settings_that_have_been_configured.each do |setting|
issue_deprecation_warning(setting)
end
end
# Emit a deprecation warning for +setting+, appending a reference link from
# DEPRECATION_REFS when one is registered for the setting's name.
#
# @param setting [Object] the deprecated setting object
# @param msg [String, nil] optional custom message overriding the defaults
def issue_deprecation_warning(setting, msg = nil)
name = setting.name
ref = DEPRECATION_REFS.find { |params,reference| params.include?(name) }
ref = ref[1] if ref
case
when msg
msg << " #{ref}" if ref
Puppet.deprecation_warning(msg)
when setting.completely_deprecated?
message = _("Setting %{name} is deprecated.") % { name: name }
# Fix: only append a reference when one exists; previously a lone " "
# was appended for settings without a registered reference, unlike the
# guarded custom-message branch above.
message += " #{ref}" if ref
Puppet.deprecation_warning(message, "setting-#{name}")
when setting.allowed_on_commandline?
#TRANSLATORS 'puppet.conf' is a file name and should not be translated
message = _("Setting %{name} is deprecated in puppet.conf.") % { name: name }
message += " #{ref}" if ref
Puppet.deprecation_warning(message, "puppet-conf-setting-#{name}")
end
end
# Add a file resource for the configured "production" environment directory
# to the settings catalog, creating it with restricted permissions when it
# does not yet exist. Only applies when the configured environment is
# "production", the environmentpath's first entry exists, and the
# environment path is not a symlink.
def add_environment_resources(catalog, sections)
path = self[:environmentpath]
envdir = path.split(File::PATH_SEPARATOR).first if path
configured_environment = self[:environment]
if configured_environment == "production" && envdir && Puppet::FileSystem.exist?(envdir)
configured_environment_path = File.join(envdir, configured_environment)
if !Puppet::FileSystem.symlink?(configured_environment_path)
parameters = { :ensure => 'directory' }
unless Puppet::FileSystem.exist?(configured_environment_path)
parameters.merge!(:mode => '0750')
if Puppet.features.root?
parameters.merge!(:owner => Puppet[:user]) if service_user_available?
parameters.merge!(:group => Puppet[:group]) if service_group_available?
end
end
catalog.add_resource(Puppet::Resource.new(:file, configured_environment_path, :parameters => parameters))
end
end
end
# Add user/group resources for every file/directory setting's owner and
# group (except root/wheel) to the settings catalog. Only applies when
# running as root, not on Windows, and the :mkusers setting is enabled.
def add_user_resources(catalog, sections)
return unless Puppet.features.root?
return if Puppet::Util::Platform.windows?
return unless self[:mkusers]
@config.each do |name, setting|
next unless setting.respond_to?(:owner)
next unless sections.nil? or sections.include?(setting.section)
user = setting.owner
if user && user != "root" && catalog.resource(:user, user).nil?
resource = Puppet::Resource.new(:user, user, :parameters => {:ensure => :present})
resource[:gid] = self[:group] if self[:group]
catalog.add_resource resource
end
group = setting.group
if group && ! %w{root wheel}.include?(group) && catalog.resource(:group, group).nil?
catalog.add_resource Puppet::Resource.new(:group, group, :parameters => {:ensure => :present})
end
end
end
# Yield each search source in turn.
# Resolves the search path into concrete value sources, dropping sources
# that resolve to nil (e.g. absent config-file sections).
def value_sets_for(environment, mode)
searchpath(environment, mode).collect { |source| searchpath_values(source) }.compact
end
# Read the file in.
# @api private
def read_file(file)
return Puppet::FileSystem.read(file, :encoding => 'utf-8')
end
# Private method for internal test use only; allows to do a comprehensive clear of all settings between tests.
#
# @return nil
def clear_everything_for_tests()
unsafe_clear(true, true)
@configuration_file = nil
@global_defaults_initialized = false
@app_defaults_initialized = false
end
private :clear_everything_for_tests
# @return [Boolean] true when the user explicitly provided a config file
#   (via :config or :confdir); false when interpolation of :config fails
def explicit_config_file?
# Figure out if the user has provided an explicit configuration file.  If
# so, return the path to the file, if not return nil.
#
# The easiest way to determine whether an explicit one has been specified
# is to simply attempt to evaluate the value of ":config".  This will
# obviously be successful if they've passed an explicit value for :config,
# but it will also result in successful interpolation if they've only
# passed an explicit value for :confdir.
#
# If they've specified neither, then the interpolation will fail and we'll
# get an exception.
#
begin
return true if self[:config]
rescue InterpolationError
# This means we failed to interpolate, which means that they didn't
# explicitly specify either :config or :confdir... so we'll fall out to
# the default value.
return false
end
end
private :explicit_config_file?
# Lookup configuration setting value through a chain of different value sources.
#
# @api public
class ChainedValues
ENVIRONMENT_SETTING = "environment".freeze
# Settings for which interpolating $environment is permitted.
ENVIRONMENT_INTERPOLATION_ALLOWED = ['config_version'].freeze
# @see Puppet::Settings.values
# @api private
def initialize(mode, environment, value_sets, defaults)
@mode = mode
@environment = environment
@value_sets = value_sets
@defaults = defaults
end
# Lookup the uninterpolated value.
#
# @param name [Symbol] The configuration setting name to look up
# @return [Object] The configuration setting value or nil if the setting is not known
# @api public
def lookup(name)
# First value set that both includes the name and yields a non-nil value
# wins; otherwise fall back to the setting's default.
set = @value_sets.find do |value_set|
value_set.include?(name)
end
if set
value = set.lookup(name)
if !value.nil?
return value
end
end
@defaults[name].default
end
# Lookup the interpolated value. All instances of `$name` in the value will
# be replaced by performing a lookup of `name` and substituting the text
# for `$name` in the original value. This interpolation is only performed
# if the looked up value is a String.
#
# @param name [Symbol] The configuration setting name to look up
# @return [Object] The configuration setting value or nil if the setting is not known
# @api public
def interpolate(name)
setting = @defaults[name]
return nil unless setting
lookup_and_convert(name) do |val|
setting.munge(val)
end
end
# Like #interpolate, but renders the value via the setting's print method.
def print(name)
setting = @defaults[name]
return nil unless setting
lookup_and_convert(name) do |val|
setting.print(val)
end
end
private
# Look up +name+, interpolate it, and yield the converted value to the block.
def lookup_and_convert(name, &block)
val = lookup(name)
# if we interpolate code, all hell breaks loose.
if name == :code
val
else
# Convert it if necessary
begin
val = convert(val, name)
rescue InterpolationError => err
# This happens because we don't have access to the param name when the
# exception is originally raised, but we want it in the message
raise InterpolationError, _("Error converting value for param '%{name}': %{detail}") % { name: name, detail: err }, err.backtrace
end
yield val
end
end
# Interpolate $var / ${var} references inside String values; non-String
# values pass through untouched.
def convert(value, setting_name)
case value
when nil
nil
when String
failed_environment_interpolation = false
interpolated_value = value.gsub(/\$(\w+)|\$\{(\w+)\}/) do |expression|
varname = $2 || $1
interpolated_expression =
if varname != ENVIRONMENT_SETTING || ok_to_interpolate_environment(setting_name)
if varname == ENVIRONMENT_SETTING && @environment
@environment
elsif varname == "run_mode"
@mode
elsif !(pval = interpolate(varname.to_sym)).nil?
pval
else
raise InterpolationError, _("Could not find value for %{expression}") % { expression: expression }
end
else
# $environment is not allowed here: keep the literal text and warn below.
failed_environment_interpolation = true
expression
end
interpolated_expression
end
if failed_environment_interpolation
#TRANSLATORS '$environment' is a Puppet specific variable and should not be translated
Puppet.warning(_("You cannot interpolate $environment within '%{setting_name}' when using directory environments.") % { setting_name: setting_name } +
' ' + _("Its value will remain %{value}.") % { value: interpolated_value })
end
interpolated_value
else
value
end
end
def ok_to_interpolate_environment(setting_name)
ENVIRONMENT_INTERPOLATION_ALLOWED.include?(setting_name.to_s)
end
end
# Mutable, named collection of explicitly-assigned setting values
# (for example the :cli or :memory value sets).
class Values
extend Forwardable
attr_reader :name
# @param name [Symbol] identifier for this value set (:cli, :memory, ...)
# @param defaults [Hash] known setting definitions, keyed by setting name
def initialize(name, defaults)
@name = name
@values = {}
@defaults = defaults
end
def_delegator :@values, :include?
def_delegator :@values, :[], :lookup
# Assign +value+ to the setting +name+, invoking the setting's hook when
# it has one. If the hook raises, the previous value is restored before
# the exception is re-raised.
def set(name, value)
setting = @defaults[name]
unless setting
raise ArgumentError, _("Attempt to assign a value to unknown setting %{name}") % { name: name.inspect }
end
# This little exception-handling dance ensures that a hook is
# able to check whether a value for itself has been explicitly
# set, while still preserving the existing value if the hook
# throws (as was existing behavior)
previous = @values[name]
@values[name] = value
begin
setting.handle(value) if setting.has_hook?
rescue Exception => e
@values[name] = previous
raise e
end
end
def inspect
%Q{<#{self.class}:#{self.object_id} @name="#{@name}" @values="#{@values}">}
end
end
class ValuesFromSection
attr_reader :name
def initialize(name, section)
@name = name
@section = section
end
def include?(name)
[email protected](name).nil?
end
def lookup(name)
setting = @section.setting(name)
if setting
setting.value
end
end
def inspect
%Q{<#{self.class}:#{self.object_id} @name="#{@name}" @section="#{@section}">}
end
end
# @api private
# Value source backed by an environment's environment.conf; only the
# settings listed in EnvironmentConf::VALID_SETTINGS are resolvable.
class ValuesFromEnvironmentConf
def initialize(environment_name)
@environment_name = environment_name
end
def name
@environment_name
end
# True only for valid environment.conf settings when a conf is loadable.
def include?(name)
if Puppet::Settings::EnvironmentConf::VALID_SETTINGS.include?(name) && conf
return true
end
false
end
def lookup(name)
return nil unless Puppet::Settings::EnvironmentConf::VALID_SETTINGS.include?(name)
conf.send(name) if conf
end
# Lazily fetch and memoize the environment's conf object via the
# :environments context (nil when no environments context is bound).
def conf
unless @conf
environments = Puppet.lookup(:environments) { nil }
@conf = environments.get_conf(@environment_name) if environments
end
@conf
end
def inspect
%Q{<#{self.class}:#{self.object_id} @environment_name="#{@environment_name}" @conf="#{@conf}">}
end
end
end
| 33.369509 | 161 | 0.68056 |
1d1bb385e551d7f8fe2d3090d614f4ab87f38800 | 1,592 | class Mozjpeg < Formula
desc "Improved JPEG encoder"
homepage "https://github.com/mozilla/mozjpeg"
url "https://github.com/mozilla/mozjpeg/archive/v4.0.3.tar.gz"
sha256 "4f22731db2afa14531a5bf2633d8af79ca5cb697a550f678bf43f24e5e409ef0"
license "BSD-3-Clause"
# Track upstream version tags of the form vX.Y(.Z...).
livecheck do
url :stable
regex(/^v?(\d+(?:\.\d+)+)$/i)
end
bottle do
sha256 arm64_big_sur: "43d05f184bc2c2f0451913c9d6a437dd597c9da0fc675fd6a96859face7d8819"
sha256 big_sur: "62b7cba57dec06208ee2af6a726b918c0131c0d4f4b735d32eab16df348e1852"
sha256 catalina: "0664824dab3ebe497562d4b9fcb1fdafd011d7f0bcd6d50dc60bd73db57168cc"
sha256 mojave: "0188f192ba8d6471e034d8144b321a84871d46cf110fb27bdebb67f2d9116baa"
sha256 cellar: :any_skip_relocation, x86_64_linux: "bfd4e10acdc52b050974be904ccb9c8cbcbaa563f7c2e29e64935534ba6d53e1"
end
keg_only "mozjpeg is not linked to prevent conflicts with the standard libjpeg"
depends_on "cmake" => :build
depends_on "nasm" => :build
depends_on "libpng"
def install
# Out-of-tree CMake build; replace the std_cmake_args libdir flag with the
# keg's absolute lib path so libraries install into the keg.
mkdir "build" do
args = std_cmake_args - %w[-DCMAKE_INSTALL_LIBDIR=lib]
system "cmake", "..", *args, "-DCMAKE_INSTALL_LIBDIR=#{lib}"
system "make"
system "make", "install"
end
end
# Smoke test: run jpegtran on the bundled fixture and write a transformed copy.
test do
system bin/"jpegtran", "-crop", "1x1",
"-transpose", "-optimize",
"-outfile", "out.jpg",
test_fixtures("test.jpg")
end
end
| 37.023256 | 122 | 0.640704 |
3852f8fcc76b9bb8158798b12f3ba77c871ea959 | 510 | module DomoscioViz
# @abstract
class Resource
class << self
# @return [String] the demodulized class name (e.g. "Payment")
def class_name
name.split('::')[-1]
end
# Build the request path fragment for this resource.
#
# @param util_name [String, nil] optional sub-path appended as "/<util_name>"
# @param on_self [Boolean, nil] when truthy, ignore util_name and return ""
# @return [String] "" or "/<util_name>"
# @raise [NotImplementedError] when called on the abstract Resource class
def url(util_name = nil, on_self = nil)
if self == Resource
# Idiomatic raise (class, message) instead of NotImplementedError.new(...).
raise NotImplementedError, 'Resource is an abstract class. Do not use it directly.'
end
return '' if on_self || util_name.nil?
"/#{util_name}"
end
end
end
end | 21.25 | 97 | 0.52549 |
e89ab70d9ef1a6ead2c7cf4a5533535a7d47542a | 3,966 | # -*- encoding: utf-8 -*-
# stub: sprockets 3.7.0 ruby lib
# NOTE: generated gemspec stub (see the "stub:" header above) — edit the gem's
# canonical gemspec upstream rather than hand-modifying this file.
Gem::Specification.new do |s|
s.name = "sprockets".freeze
s.version = "3.7.0"
s.required_rubygems_version = Gem::Requirement.new(">= 0".freeze) if s.respond_to? :required_rubygems_version=
s.require_paths = ["lib".freeze]
s.authors = ["Sam Stephenson".freeze, "Joshua Peek".freeze]
s.date = "2016-07-21"
s.description = "Sprockets is a Rack-based asset packaging system that concatenates and serves JavaScript, CoffeeScript, CSS, LESS, Sass, and SCSS.".freeze
s.email = ["[email protected]".freeze, "[email protected]".freeze]
s.executables = ["sprockets".freeze]
s.files = ["bin/sprockets".freeze]
s.homepage = "https://github.com/rails/sprockets".freeze
s.licenses = ["MIT".freeze]
s.required_ruby_version = Gem::Requirement.new(">= 1.9.3".freeze)
s.rubyforge_project = "sprockets".freeze
s.rubygems_version = "2.6.1".freeze
s.summary = "Rack-based asset packaging system".freeze
s.installed_by_version = "2.6.1" if s.respond_to? :installed_by_version
# Dependency declarations, branched on RubyGems capabilities: modern RubyGems
# distinguishes runtime vs development dependencies; older versions fall back
# to plain add_dependency.
if s.respond_to? :specification_version then
s.specification_version = 4
if Gem::Version.new(Gem::VERSION) >= Gem::Version.new('1.2.0') then
s.add_runtime_dependency(%q<rack>.freeze, ["< 3", "> 1"])
s.add_runtime_dependency(%q<concurrent-ruby>.freeze, ["~> 1.0"])
s.add_development_dependency(%q<closure-compiler>.freeze, ["~> 1.1"])
s.add_development_dependency(%q<coffee-script-source>.freeze, ["~> 1.6"])
s.add_development_dependency(%q<coffee-script>.freeze, ["~> 2.2"])
s.add_development_dependency(%q<eco>.freeze, ["~> 1.0"])
s.add_development_dependency(%q<ejs>.freeze, ["~> 1.0"])
s.add_development_dependency(%q<execjs>.freeze, ["~> 2.0"])
s.add_development_dependency(%q<minitest>.freeze, ["~> 5.0"])
s.add_development_dependency(%q<nokogiri>.freeze, ["~> 1.3"])
s.add_development_dependency(%q<rack-test>.freeze, ["~> 0.6"])
s.add_development_dependency(%q<rake>.freeze, ["~> 10.0"])
s.add_development_dependency(%q<sass>.freeze, ["~> 3.1"])
s.add_development_dependency(%q<uglifier>.freeze, ["~> 2.3"])
s.add_development_dependency(%q<yui-compressor>.freeze, ["~> 0.12"])
else
s.add_dependency(%q<rack>.freeze, ["< 3", "> 1"])
s.add_dependency(%q<concurrent-ruby>.freeze, ["~> 1.0"])
s.add_dependency(%q<closure-compiler>.freeze, ["~> 1.1"])
s.add_dependency(%q<coffee-script-source>.freeze, ["~> 1.6"])
s.add_dependency(%q<coffee-script>.freeze, ["~> 2.2"])
s.add_dependency(%q<eco>.freeze, ["~> 1.0"])
s.add_dependency(%q<ejs>.freeze, ["~> 1.0"])
s.add_dependency(%q<execjs>.freeze, ["~> 2.0"])
s.add_dependency(%q<minitest>.freeze, ["~> 5.0"])
s.add_dependency(%q<nokogiri>.freeze, ["~> 1.3"])
s.add_dependency(%q<rack-test>.freeze, ["~> 0.6"])
s.add_dependency(%q<rake>.freeze, ["~> 10.0"])
s.add_dependency(%q<sass>.freeze, ["~> 3.1"])
s.add_dependency(%q<uglifier>.freeze, ["~> 2.3"])
s.add_dependency(%q<yui-compressor>.freeze, ["~> 0.12"])
end
else
s.add_dependency(%q<rack>.freeze, ["< 3", "> 1"])
s.add_dependency(%q<concurrent-ruby>.freeze, ["~> 1.0"])
s.add_dependency(%q<closure-compiler>.freeze, ["~> 1.1"])
s.add_dependency(%q<coffee-script-source>.freeze, ["~> 1.6"])
s.add_dependency(%q<coffee-script>.freeze, ["~> 2.2"])
s.add_dependency(%q<eco>.freeze, ["~> 1.0"])
s.add_dependency(%q<ejs>.freeze, ["~> 1.0"])
s.add_dependency(%q<execjs>.freeze, ["~> 2.0"])
s.add_dependency(%q<minitest>.freeze, ["~> 5.0"])
s.add_dependency(%q<nokogiri>.freeze, ["~> 1.3"])
s.add_dependency(%q<rack-test>.freeze, ["~> 0.6"])
s.add_dependency(%q<rake>.freeze, ["~> 10.0"])
s.add_dependency(%q<sass>.freeze, ["~> 3.1"])
s.add_dependency(%q<uglifier>.freeze, ["~> 2.3"])
s.add_dependency(%q<yui-compressor>.freeze, ["~> 0.12"])
end
end
| 50.202532 | 157 | 0.63414 |
abd965ea63bfb8a3de42ead1d9c0795f8128fae5 | 168 | class AddCommunityManagerToUsers < ActiveRecord::Migration[4.2]
def change
add_column :users, :community_manager, :boolean, null: false, default: false
end
end
| 28 | 80 | 0.767857 |
110c303b86e83a92132e5bb788c23f9490e311ba | 9,265 | class Mitmproxy < Formula
desc "Intercept, modify, replay, save HTTP/S traffic"
homepage "https://mitmproxy.org"
url "https://github.com/mitmproxy/mitmproxy/archive/v0.16.tar.gz"
sha256 "d4ed9b27377431fc3d6241b43588279cb7e41e64a134a981938e162143c1ec72"
head "https://github.com/mitmproxy/mitmproxy.git"
bottle do
cellar :any
revision 1
sha256 "aaa06f4c92841c3576be9c95a926fc8196053ba9c8430d811afeb853b03fdfff" => :el_capitan
sha256 "31542212f077d5c69bc895c3de6eed60d7a766e9c876fc7f49d336a212b81704" => :yosemite
sha256 "9b6f1ea41c3d06aeab2f9dbe4ffc4834700915fe060a2b278cf6228c799ddf44" => :mavericks
end
option "with-pyamf", "Enable action message format (AMF) support for python"
option "with-cssutils", "Enable beautification of CSS responses"
depends_on "freetype"
depends_on "jpeg"
depends_on "openssl"
depends_on :python if MacOS.version <= :snow_leopard
depends_on "protobuf" => :optional
# needs a recent setuptools
resource "setuptools" do
url "https://pypi.python.org/packages/source/s/setuptools/setuptools-20.2.2.tar.gz"
sha256 "24fcfc15364a9fe09a220f37d2dcedc849795e3de3e4b393ee988e66a9cbd85a"
end
resource "argh" do
url "https://pypi.python.org/packages/source/a/argh/argh-0.26.1.tar.gz"
sha256 "06a7442cb9130fb8806fe336000fcf20edf1f2f8ad205e7b62cec118505510db"
end
resource "backports_abc" do
url "https://pypi.python.org/packages/source/b/backports_abc/backports_abc-0.4.tar.gz"
sha256 "8b3e4092ba3d541c7a2f9b7d0d9c0275b21c6a01c53a61c731eba6686939d0a5"
end
resource "backports.ssl_match_hostname" do
url "https://pypi.python.org/packages/source/b/backports.ssl_match_hostname/backports.ssl_match_hostname-3.5.0.1.tar.gz"
sha256 "502ad98707319f4a51fa2ca1c677bd659008d27ded9f6380c79e8932e38dcdf2"
end
resource "blinker" do
url "https://pypi.python.org/packages/source/b/blinker/blinker-1.4.tar.gz"
sha256 "471aee25f3992bd325afa3772f1063dbdbbca947a041b8b89466dc00d606f8b6"
end
resource "certifi" do
url "https://pypi.python.org/packages/source/c/certifi/certifi-2016.2.28.tar.gz"
sha256 "5e8eccf95924658c97b990b50552addb64f55e1e3dfe4880456ac1f287dc79d0"
end
resource "cffi" do
url "https://pypi.python.org/packages/source/c/cffi/cffi-1.5.2.tar.gz"
sha256 "da9bde99872e46f7bb5cff40a9b1cc08406765efafb583c704de108b6cb821dd"
end
resource "click" do
url "https://pypi.python.org/packages/source/c/click/click-6.6.tar.gz"
sha256 "cc6a19da8ebff6e7074f731447ef7e112bd23adf3de5c597cf9989f2fd8defe9"
end
resource "ConfigArgParse" do
url "https://pypi.python.org/packages/source/C/ConfigArgParse/ConfigArgParse-0.10.0.tar.gz"
sha256 "3b50a83dd58149dfcee98cb6565265d10b53e9c0a2bca7eeef7fb5f5524890a7"
end
resource "construct" do
url "https://pypi.python.org/packages/source/c/construct/construct-2.5.2.tar.gz"
sha256 "665b6271eeadf15219c726b180c8d7a641d026784d72ca3dad90a20aae009020"
end
resource "cryptography" do
url "https://pypi.python.org/packages/source/c/cryptography/cryptography-1.2.3.tar.gz"
sha256 "8eb11c77dd8e73f48df6b2f7a7e16173fe0fe8fdfe266232832e88477e08454e"
end
resource "enum34" do
url "https://pypi.python.org/packages/source/e/enum34/enum34-1.1.2.tar.gz"
sha256 "2475d7fcddf5951e92ff546972758802de5260bf409319a9f1934e6bbc8b1dc7"
end
resource "h2" do
url "https://pypi.python.org/packages/source/h/h2/h2-2.1.3.tar.gz"
sha256 "7d36132c42edcc19e771555e569c3f3a8610ff052e8e3d19c91b4c29a1c31e49"
end
resource "hpack" do
url "https://pypi.python.org/packages/source/h/hpack/hpack-2.1.1.tar.gz"
sha256 "17cf0750f0555447f546b4754f69c8a906a3c10a51d1884c83e41f4f3bd71f8a"
end
resource "html2text" do
url "https://pypi.python.org/packages/source/h/html2text/html2text-2016.1.8.tar.gz"
sha256 "088046f9b126761ff7e3380064d4792279766abaa5722d0dd765d011cf0bb079"
end
resource "hyperframe" do
url "https://pypi.python.org/packages/source/h/hyperframe/hyperframe-3.2.0.tar.gz"
sha256 "05f0e063e117c16fcdd13c12c93a4424a2c40668abfac3bb419a10f57698204e"
end
resource "idna" do
url "https://pypi.python.org/packages/source/i/idna/idna-2.1.tar.gz"
sha256 "ed36f281aebf3cd0797f163bb165d84c31507cedd15928b095b1675e2d04c676"
end
resource "ipaddress" do
url "https://pypi.python.org/packages/source/i/ipaddress/ipaddress-1.0.16.tar.gz"
sha256 "5a3182b322a706525c46282ca6f064d27a02cffbd449f9f47416f1dc96aa71b0"
end
resource "lxml" do
url "https://pypi.python.org/packages/source/l/lxml/lxml-3.5.0.tar.gz"
sha256 "349f93e3a4b09cc59418854ab8013d027d246757c51744bf20069bc89016f578"
end
resource "netlib" do
url "https://pypi.python.org/packages/source/n/netlib/netlib-0.16.tar.gz"
sha256 "c70ed1915a5662c9ffce4dc97d143209e009cf0035a2f692031a6c47e87e6002"
end
resource "passlib" do
url "https://pypi.python.org/packages/source/p/passlib/passlib-1.6.5.tar.gz"
sha256 "a83d34f53dc9b17aa42c9a35c3fbcc5120f3fcb07f7f8721ec45e6a27be347fc"
end
resource "pathtools" do
url "https://pypi.python.org/packages/source/p/pathtools/pathtools-0.1.2.tar.gz"
sha256 "7c35c5421a39bb82e58018febd90e3b6e5db34c5443aaaf742b3f33d4655f1c0"
end
resource "Pillow" do
url "https://pypi.python.org/packages/source/P/Pillow/Pillow-3.1.2.tar.gz"
sha256 "c593622445503ae1ee361d3a6bb40794e043b43d00c96fcb298ba43ecd375905"
end
resource "pyasn1" do
url "https://pypi.python.org/packages/source/p/pyasn1/pyasn1-0.1.9.tar.gz"
sha256 "853cacd96d1f701ddd67aa03ecc05f51890135b7262e922710112f12a2ed2a7f"
end
resource "pycparser" do
url "https://pypi.python.org/packages/source/p/pycparser/pycparser-2.14.tar.gz"
sha256 "7959b4a74abdc27b312fed1c21e6caf9309ce0b29ea86b591fd2e99ecdf27f73"
end
resource "pyOpenSSL" do
url "https://pypi.python.org/packages/source/p/pyOpenSSL/pyOpenSSL-0.15.1.tar.gz"
sha256 "f0a26070d6db0881de8bcc7846934b7c3c930d8f9c79d45883ee48984bc0d672"
end
resource "pyparsing" do
url "https://pypi.python.org/packages/source/p/pyparsing/pyparsing-2.1.1.tar.gz"
sha256 "9bae5cd4cbee6da0d7d8d9a1647f5253a3b89652e707647eaf1961f4932ae6c6"
end
resource "pyperclip" do
url "https://pypi.python.org/packages/source/p/pyperclip/pyperclip-1.5.27.zip"
sha256 "a3cb6df5d8f1557ca8fc514d94fabf50dc5a97042c90e5ba4f3611864fed3fc5"
end
resource "PyYAML" do
url "https://pypi.python.org/packages/source/P/PyYAML/PyYAML-3.11.tar.gz"
sha256 "c36c938a872e5ff494938b33b14aaa156cb439ec67548fcab3535bb78b0846e8"
end
resource "singledispatch" do
url "https://pypi.python.org/packages/source/s/singledispatch/singledispatch-3.4.0.3.tar.gz"
sha256 "5b06af87df13818d14f08a028e42f566640aef80805c3b50c5056b086e3c2b9c"
end
resource "six" do
url "https://pypi.python.org/packages/source/s/six/six-1.10.0.tar.gz"
sha256 "105f8d68616f8248e24bf0e9372ef04d3cc10104f1980f54d57b2ce73a5ad56a"
end
resource "tornado" do
url "https://pypi.python.org/packages/source/t/tornado/tornado-4.3.tar.gz"
sha256 "c9c2d32593d16eedf2cec1b6a41893626a2649b40b21ca9c4cac4243bde2efbf"
end
resource "urwid" do
url "https://pypi.python.org/packages/source/u/urwid/urwid-1.3.1.tar.gz"
sha256 "cfcec03e36de25a1073e2e35c2c7b0cc6969b85745715c3a025a31d9786896a1"
end
resource "watchdog" do
url "https://pypi.python.org/packages/source/w/watchdog/watchdog-0.8.3.tar.gz"
sha256 "7e65882adb7746039b6f3876ee174952f8eaaa34491ba34333ddf1fe35de4162"
end
# Optional resources
resource "PyAMF" do
url "https://pypi.python.org/packages/source/P/PyAMF/PyAMF-0.8.0.tar.gz"
sha256 "0455d68983e3ee49f82721132074877428d58acec52f19697a88c03b5fba74e4"
end
resource "cssutils" do
url "https://pypi.python.org/packages/source/c/cssutils/cssutils-1.0.1.tar.gz"
sha256 "d8a18b2848ea1011750231f1dd64fe9053dbec1be0b37563c582561e7a529063"
end
def install
ENV.prepend_create_path "PYTHONPATH", libexec/"vendor/lib/python2.7/site-packages"
unless MacOS::CLT.installed?
ENV.append "CPPFLAGS", "-I#{MacOS.sdk_path}/System/Library/Frameworks/Tk.framework/Versions/8.5/Headers"
ENV.append "CPPFLAGS", "-I#{MacOS.sdk_path}/usr/include/ffi" # libffi
end
resource("Pillow").stage do
inreplace "setup.py", "'brew', '--prefix'", "'#{HOMEBREW_PREFIX}/bin/brew', '--prefix'"
saved_sdkroot = ENV.delete "SDKROOT"
begin
system "python", *Language::Python.setup_install_args(libexec/"vendor")
ensure
ENV["SDKROOT"] = saved_sdkroot
end
end
res = resources.map(&:name).to_set - ["Pillow"]
res << "PyAMF" if build.with? "pyamf"
res << "cssutils" if build.with? "cssutils"
res.each do |r|
resource(r).stage do
system "python", *Language::Python.setup_install_args(libexec/"vendor")
end
end
ENV.prepend_create_path "PYTHONPATH", libexec/"lib/python2.7/site-packages"
system "python", *Language::Python.setup_install_args(libexec)
bin.install Dir[libexec/"bin/*"]
bin.env_script_all_files(libexec/"bin", :PYTHONPATH => ENV["PYTHONPATH"])
end
test do
ENV["LANG"] = "en_US.UTF-8"
system bin/"mitmproxy", "--version"
end
end
| 37.358871 | 124 | 0.764274 |
# Core extensions on String used for handle ("@name") munging and for
# comma-tolerant integer parsing. NOTE(review): this monkey-patches a core
# class (including redefining #to_i); confirm no other code relies on the
# stock #to_i behaviour for comma-containing strings.
class String
  # Returns a copy of the string prefixed with a single "@".
  def prepend_at
    '@' + self
  end

  # Returns a copy with every "@" removed.
  def strip_ats
    delete('@')
  end

  # Keep the original Integer parser reachable under a new name.
  alias_method :old_to_i, :to_i

  # Like the built-in #to_i, but ignores thousands separators
  # ("1,234" => 1234). Accepts the same optional base argument.
  def to_i(base = 10)
    delete(',').old_to_i(base)
  end
end
| 11.6875 | 31 | 0.561497 |
3878270770cb9a7bc3981bbd69c405be6925111b | 716 | module ActiveRecord
module TypeCaster
class Connection
def initialize(klass, table_name)
@klass = klass
@table_name = table_name
end
def type_cast_for_database(attribute_name, value)
return value if value.is_a?(Arel::Nodes::BindParam)
column = column_for(attribute_name)
connection.type_cast_from_column(column, value)
end
protected
attr_reader :table_name
delegate :connection, to: :@klass
private
def column_for(attribute_name)
if connection.schema_cache.table_exists?(table_name)
connection.schema_cache.columns_hash(table_name)[attribute_name.to_s]
end
end
end
end
end
| 23.866667 | 79 | 0.671788 |
62c2f49e76edc0884085bd53bd851cf2b0f8aa0e | 7,756 | require 'cdo/chat_client'
require 'cdo/google_drive'
#require src_dir 'database'
# Imports a CSV file into a database table named after the file (dashes
# become underscores). Column types are encoded in the CSV header via
# suffixes (e.g. name_s, count_i, enabled_b) with an optional trailing
# '!' (unique) or '*' (indexed). Import freshness is tracked in the
# seed_info table using the CSV file's mtime.
class CsvToSqlTable
  # path   - path to the CSV file; its basename determines the table name.
  # params - optional :db database handle (defaults to the global DB).
  #
  # Fix: the :db parameter was previously captured in @db but every query
  # still used the global DB constant, silently ignoring the injected
  # handle. All queries now go through @db (identical behaviour when no
  # :db is supplied).
  def initialize(path, params={})
    @db = params[:db] || DB
    @path = path
    @table = File.basename(@path, File.extname(@path)).tr('-', '_').to_sym
  end

  # True when seed_info records the same mtime as the CSV file on disk.
  def up_to_date?
    seed = @db[:seed_info].where(table: @table.to_s).first
    return false unless seed
    mtime = File.mtime(@path)
    mtime.to_s == seed[:mtime].to_s
  end

  # Imports only when the CSV changed since the last import; returns the
  # table name either way.
  def import
    import! unless up_to_date?
    @table
  end

  # Unconditionally (re)creates the table and loads every CSV row.
  def import!
    ChatClient.log "Importing <b>#{@table}</b> table from <b>#{File.basename(@path)}</b>"
    # Starting with 1 means the first item's ID is 2 which matches the id to the line number of the item.
    at = 1
    CSV.open(@path, 'rb') do |csv|
      # The first row is the header and defines the schema.
      table, columns = create_table(csv.shift)
      while values = csv.shift
        table.insert(hash_from_keys_and_values(columns, values).merge({id: at += 1}))
      end
    end
    # Record the source mtime so up_to_date? can skip unchanged files.
    set_table_mtime(File.mtime(@path))
    ChatClient.log "Imported <b>#{at - 1}</b> rows into <b>#{@table}</b>"
    @table
  end

  private

  # Zips header symbols with row values, coercing each value according to
  # the column's type suffix. NOTE(review): #to_bool is presumably a
  # project core extension — confirm it is loaded wherever this runs.
  def hash_from_keys_and_values(keys, values)
    h = {}
    (0..keys.count - 1).each do |i|
      key_name = keys[i].to_s
      value =
        case key_name[key_name.rindex('_')..-1]
        when '_b'
          values[i].to_bool
        when '_f'
          values[i].to_f
        when '_i'
          values[i].to_i
        else
          values[i]
        end
      h[keys[i]] = value
    end
    h
  end

  # Drops and recreates the table from the header row's schema; returns
  # the dataset plus the parsed column name symbols.
  def create_table(columns)
    schema = columns.map {|column| column_name_to_schema(column)}
    @db.create_table!(@table, charset: 'utf8') do
      primary_key :id
      schema.each do |column|
        add_column column[:name], type: column[:type]
        index column[:name] if column[:index]
        unique column[:name] if column[:unique]
      end
    end
    [@db[@table], schema.map {|i| i[:name]}]
  end

  # Parses one header cell into {name:, type:[, index:|unique:]} using the
  # suffix conventions documented on the class.
  def column_name_to_schema(name)
    i = name.rindex('_')
    if i.nil?
      ChatClient.log "Bad column name (#{name}) for table (#{@table}), see this " \
      "<a href='https://drive.google.com/drive/folders/0B0OFfWqnAHxhM0prRGd0UWczMUU'>Google Drive</a> folder."
    end
    # A trailing '!' marks a unique column, '*' an indexed one.
    if name.ends_with?('!') || name.ends_with?('*')
      type_flag = name[-1..-1]
      name = name[0..-2]
    end
    type_info = name[i..-1]
    type = {
      '_b' => {type: 'boolean'},
      '_dt' => {type: 'datetime'},
      '_f' => {type: 'float'},
      '_i' => {type: 'integer'},
      '_bi' => {type: 'bigint'},
      '_s' => {type: 'varchar(255)'},
      '_ss' => {type: 'varchar(255)'},
      '_t' => {type: 'text'},
    }[type_info] || {type: 'varchar(255)'}
    type = type.merge(unique: true) if type_flag == '!'
    type = type.merge(index: true) if type_flag == '*'
    type.merge({name: name.to_sym})
  end

  # Upserts this table's row in seed_info with the CSV file's mtime.
  def set_table_mtime(mtime)
    seed_info = @db[:seed_info]
    if seed_info.where(table: @table.to_s).first
      seed_info.where(table: @table.to_s).update(mtime: mtime)
    else
      seed_info.insert(table: @table.to_s, mtime: mtime)
    end
  end
end
# Downloads a Google Sheet to a sibling .csv file. The on-disk .gsheet
# file contains the Drive path on its first line, optionally followed by
# YAML settings (include_columns / exclude_columns). The CSV's mtime is
# set to the Drive file's mtime so freshness can be checked later.
# NOTE(review): @@gdrive is a class variable shared across all instances —
# acceptable as a process-wide client cache, but shared mutable state.
class GSheetToCsv
  @@gdrive = nil
  # path - local .gsheet descriptor file.
  def initialize(path)
    # First line is the Drive path; the remainder (if any) is YAML config.
    @gsheet_path, settings_yml = IO.read(path).strip.split("\n", 2)
    settings = YAML.load(settings_yml.to_s) || {}
    @include_columns = settings['include_columns'] || []
    @exclude_columns = settings['exclude_columns'] || []
    @csv_path = File.join(File.dirname(path), File.basename(path, File.extname(path)) + '.csv')
    @file = nil
  end
  # True when the local CSV's mtime matches the Drive file's mtime.
  # Returns nil (falsy) when the Drive file cannot be found.
  def up_to_date?
    @file ||= (@@gdrive ||= Google::Drive.new).file(@gsheet_path)
    unless @file
      ChatClient.log "Google Drive file <b>#{@gsheet_path}</b> not found.", color: 'red', notify: 1
      return
    end
    begin
      mtime = @file.mtime
      ctime = File.mtime(@csv_path) if File.file?(@csv_path)
      return mtime.to_s == ctime.to_s
    rescue GoogleDrive::Error => e
      # On API errors, claim freshness rather than hammering Drive again.
      ChatClient.log "<p>Error getting modified time for <b>#{@gsheet_path}<b> from Google Drive.</p><pre><code>#{e.message}</code></pre>", color: 'yellow'
      true # Assume the current thing is up to date.
    end
  end
  # Downloads only when stale; returns the local CSV path either way.
  def import
    import! unless up_to_date?
    @csv_path
  end
  # Unconditionally downloads the sheet, applies the column filters, and
  # rewrites the local CSV (stamping it with the Drive mtime).
  def import!
    ChatClient.log "Downloading <b>#{@gsheet_path}</b> from Google Drive."
    @file ||= (@@gdrive ||= Google::Drive.new).file(@gsheet_path)
    unless @file
      ChatClient.log "Google Drive file <b>#{@gsheet_path}</b> not found.", color: 'red', notify: 1
      return
    end
    begin
      buf = @file.spreadsheet_csv
    rescue GoogleDrive::Error => e
      puts "Error on file: #{@gsheet_path}, #{e}"
      throw e
    end
    CSV.open(@csv_path, 'wb') do |csv|
      columns = nil
      CSV.parse(buf, headers: true) do |row|
        unless columns
          # Determine the set of columns to be output.
          columns = row.headers
          unless @include_columns.empty?
            columns &= @include_columns
          end
          columns -= @exclude_columns
          # Output the columns.
          csv << columns
        end
        csv << columns.map {|i| row[i]}
      end
    end
    # Mirror the Drive mtime onto the CSV so up_to_date? works later.
    File.utime(File.atime(@csv_path), @file.mtime, @csv_path)
    ChatClient.log "Downloaded <b>#{@gsheet_path}</b> (<b>#{File.size(@csv_path)}</b> btyes) from Google Drive."
    return @csv_path
  end
end
# Process-wide cached Google Drive client (see #gdrive below).
$gdrive_ = nil

# Rake tasks for syncing seed spreadsheets from Google Drive and loading
# them into database tables.
namespace :seed do
  # Coerces a CSV cell: 'TRUE'/'FALSE' become booleans, m/d/yyyy strings
  # become Dates, everything else passes through unchanged.
  def csv_smart_value(value)
    return true if value == 'TRUE'
    return false if value == 'FALSE'
    return Date.strptime(value.to_s.strip, '%m/%d/%Y') if value.to_s.strip =~ /^\d{1,2}\/\d{1,2}\/\d{4}$/
    return value
  end
  # Empties +table+ and reloads it from the CSV at +path+, mapping CSV
  # headers onto the table's columns. Rows get sequential ids when the
  # table has an :id column.
  def import_csv_into_table(path, table)
    db = DB[table]
    db.delete
    auto_id = db.columns.include?(:id)
    count = 0
    CSV.foreach(path, headers: true, encoding: 'utf-8') do |data|
      record = {}
      db.columns.each {|column| record[column] = csv_smart_value(data[column.to_s])}
      count += 1
      record[:id] = count if auto_id
      db.insert record
    end
    puts "#{count} items imported into #{table} from '#{path}'"
  end
  # Marker file used by the `file` tasks below to track import freshness.
  def stub_path(table)
    cache_dir(".#{table}-imported")
  end
  # Lazily-built, process-wide Google Drive client.
  def gdrive
    $gdrive_ ||= Google::Drive.new
  end
  sync_tasks = []
  # Tables to import, keyed by table name => Drive spreadsheet path.
  imports = {
    beyond_tutorials: 'Data/HocBeyondTutorials.gsheet',
    tutorials: 'Data/HocTutorials.gsheet'
  }
  # For each import, define: a sync:<table> download task, a <table>
  # import task, and a file task keyed on a stub so unchanged CSVs are
  # not re-imported.
  imports.each_pair do |table, path|
    extname = File.extname(path)
    if extname == '.gsheet'
      gsheet = path[0..-(extname.length + 1)]
      path = "cache/#{path.gsub(File::SEPARATOR, '_')}.csv"
      sync = "sync:#{table}"
      task sync do
        if file = gdrive.file(gsheet)
          mtime = file.mtime
          ctime = File.mtime(path).utc if File.file?(path)
          # Download only when the Drive mtime differs from the local copy.
          unless mtime.to_s == ctime.to_s
            puts "gdrive #{path}"
            IO.write(path, file.spreadsheet_csv)
            File.utime(File.atime(path), mtime, path)
          end
        else
          ChatClient.log "Google Drive file <b>#{gsheet}</b> not found.", color: 'red', notify: 1
        end
      end
      sync_tasks << sync
    end
    task table do
      import_csv_into_table(path, table)
    end
    stub = stub_path(table)
    file stub => [path] do
      import_csv_into_table(path, table)
      touch stub
    end
  end
  desc 'import any modified seeds'
  task migrate: imports.keys.map {|i| stub_path(i)} do
    Dir.glob(pegasus_dir('data/*.csv')) {|i| CsvToSqlTable.new(i).import}
  end
  desc 'drop and import all seeds'
  task reset: imports.keys do
    Dir.glob(pegasus_dir('data/*.csv')) {|i| CsvToSqlTable.new(i).import!}
  end
  # v3 pipeline: refresh the CSV next to each .gsheet descriptor.
  task :sync_v3 do
    Dir.glob(pegasus_dir('data/*.gsheet')) {|i| GSheetToCsv.new(i).import}
  end
  desc 'update remote seeds and import any modified'
  task sync: [:sync_v3, sync_tasks, :migrate].flatten do
  end
end
| 26.114478 | 155 | 0.599794 |
e28a5455bac4f193261be15b1431ea10bbe32bce | 4,474 | ##
# This code was generated by
# \ / _ _ _| _ _
# | (_)\/(_)(_|\/| |(/_ v1.0.0
# / /
#
# frozen_string_literal: true
module Twilio
module REST
class Voice < Domain
class V1 < Version
##
# PLEASE NOTE that this class contains preview products that are subject to change. Use them with caution. If you currently do not have developer preview access, please contact [email protected].
class DialingPermissionsList < ListResource
##
# Initialize the DialingPermissionsList
# @param [Version] version Version that contains the resource
# @return [DialingPermissionsList] DialingPermissionsList
def initialize(version)
super(version)
# Path Solution
@solution = {}
# Components
@countries = nil
@settings = nil
@bulk_country_updates = nil
end
##
# Access the countries
# @param [String] iso_code The [ISO country
# code](https://en.wikipedia.org/wiki/ISO_3166-1_alpha-2)
# @return [CountryList]
# @return [CountryContext] if iso_code was passed.
def countries(iso_code=:unset)
raise ArgumentError, 'iso_code cannot be nil' if iso_code.nil?
if iso_code != :unset
return CountryContext.new(@version, iso_code, )
end
@countries ||= CountryList.new(@version, )
end
##
# Access the settings
# @return [SettingsList]
# @return [SettingsContext]
def settings
return SettingsContext.new(@version, )
@settings ||= SettingsList.new(@version, )
end
##
# Access the bulk_country_updates
# @return [BulkCountryUpdateList]
# @return [BulkCountryUpdateContext]
def bulk_country_updates
@bulk_country_updates ||= BulkCountryUpdateList.new(@version, )
end
##
# Provide a user friendly representation
def to_s
'#<Twilio.Voice.V1.DialingPermissionsList>'
end
end
##
# PLEASE NOTE that this class contains preview products that are subject to change. Use them with caution. If you currently do not have developer preview access, please contact [email protected].
class DialingPermissionsPage < Page
##
# Initialize the DialingPermissionsPage
# @param [Version] version Version that contains the resource
# @param [Response] response Response from the API
# @param [Hash] solution Path solution for the resource
# @return [DialingPermissionsPage] DialingPermissionsPage
def initialize(version, response, solution)
super(version, response)
# Path Solution
@solution = solution
end
##
# Build an instance of DialingPermissionsInstance
# @param [Hash] payload Payload response from the API
# @return [DialingPermissionsInstance] DialingPermissionsInstance
def get_instance(payload)
DialingPermissionsInstance.new(@version, payload, )
end
##
# Provide a user friendly representation
def to_s
'<Twilio.Voice.V1.DialingPermissionsPage>'
end
end
##
# PLEASE NOTE that this class contains preview products that are subject to change. Use them with caution. If you currently do not have developer preview access, please contact [email protected].
class DialingPermissionsInstance < InstanceResource
##
# Initialize the DialingPermissionsInstance
# @param [Version] version Version that contains the resource
# @param [Hash] payload payload that contains response from Twilio
# @return [DialingPermissionsInstance] DialingPermissionsInstance
def initialize(version, payload)
super(version)
end
##
# Provide a user friendly representation
def to_s
"<Twilio.Voice.V1.DialingPermissionsInstance>"
end
##
# Provide a detailed, user friendly representation
def inspect
"<Twilio.Voice.V1.DialingPermissionsInstance>"
end
end
end
end
end
end | 34.152672 | 201 | 0.594993 |
class UsersController < ApplicationController
  before_action :logged_in_user, only: [:edit, :update, :index, :destroy, :following, :followers]
  before_action :correct_user, only: [:edit, :update]
  before_action :admin_user, only: :destroy

  # GET /users — paginated list restricted to activated accounts.
  def index
    @users = User.where(activated: true).paginate(page: params[:page])
  end

  # GET /users/:id — profile with the user's microposts; accounts that
  # have not completed email activation are hidden.
  def show
    @user = User.find(params[:id])
    @microposts = @user.microposts.paginate(page: params[:page])
    redirect_to root_url and return unless @user.activated == true
  end

  # GET /signup
  def new
    @user = User.new
  end

  # POST /users — creates an inactive account and emails an activation link.
  def create
    @user = User.new(user_params)
    if @user.save
      @user.send_activation_email
      flash[:success] = "Please check your email to activate your account."
      redirect_to root_url
    else
      render "new"
    end
  end

  # GET /users/:id/edit (restricted to the user themselves via correct_user).
  def edit
  end

  # PATCH /users/:id
  def update
    if @user.update_attributes(user_params)
      flash[:success] = "Profile updated"
      redirect_to @user
    else
      render "edit"
    end
  end

  # DELETE /users/:id (admins only). Uses #destroy rather than #delete so
  # ActiveRecord callbacks and dependent associations run — #delete issued
  # a bare SQL DELETE, which would orphan the user's dependent records
  # (presumably microposts/relationships; confirm in the User model).
  def destroy
    User.find(params[:id]).destroy
    flash[:success] = "User deleted"
    redirect_to users_url
  end

  # GET /users/:id/following
  def following
    @title = "Following"
    @user = User.find(params[:id])
    @users = @user.following.paginate(page: params[:page])
    render "show_follow"
  end

  # GET /users/:id/followers
  def followers
    @title = "Followers"
    @user = User.find(params[:id])
    @users = @user.followers.paginate(page: params[:page])
    render "show_follow"
  end

  private

  # Strong parameters: only these attributes may be mass-assigned.
  def user_params
    params.require(:user).permit(:name, :email, :password, :password_confirmation)
  end

  # Before filter: a user may only edit/update their own profile.
  def correct_user
    @user = User.find(params[:id])
    redirect_to(root_url) unless current_user?(@user)
  end

  # Before filter: restricts the action to admins.
  def admin_user
    redirect_to(root_url) unless current_user.admin?
  end
end
| 22.461538 | 97 | 0.672374 |
2820ea515f77f1d0570de661e4fa7192a5a50e8e | 2,196 |
RSpec.describe ETG::Exporter::Context do
  # Convenience constructor: every context in this spec wraps a fake
  # Gcloud client, varying only the keyword attributes under test.
  def build_context(**attrs)
    described_class.new(ETG::FakeGcloudClient.new, **attrs)
  end

  describe '#set' do
    it 'base'
  end

  describe '#set_dump_path' do
    it 'sets dump_path as String' do
      ctx = build_context(dump_path: '/tmp')
      expect(ctx.dump_path.class).to eq(Pathname)
      expect(ctx.dump_path.to_path).to eq('/tmp')
    end

    it 'sets dump_path as Pathname' do
      ctx = build_context(dump_path: Pathname.new('/tmp'))
      expect(ctx.dump_path.class).to eq(Pathname)
      expect(ctx.dump_path.to_path).to eq('/tmp')
    end
  end

  describe '#set_storage_prefix' do
    it 'sets storage_prefix' do
      ctx = build_context(storage_prefix: 'export/')
      expect(ctx.storage_prefix).to eq('export/')
    end
  end

  describe '#set_bucket' do
    it 'sets bucket from string' do
      ctx = build_context(bucket: 'export')
      expect(ctx.bucket.class).to eq(ETG::FakeGcloudClient::Storage::Bucket)
      expect(ctx.bucket.name).to eq('export')
    end

    it 'sets bucket as object' do
      ctx = build_context(bucket: ETG::FakeGcloudClient::Storage::Bucket.new('export'))
      expect(ctx.bucket.class).to eq(ETG::FakeGcloudClient::Storage::Bucket)
      expect(ctx.bucket.name).to eq('export')
    end
  end

  describe '#set_dataset' do
    it 'sets dataset from string' do
      ctx = build_context(dataset: 'export')
      expect(ctx.dataset.class).to eq(ETG::FakeGcloudClient::BigQuery::Dataset)
      expect(ctx.dataset.key).to eq('export')
    end

    it 'sets dataset as object' do
      ctx = build_context(dataset: ETG::FakeGcloudClient::BigQuery::Dataset.new('export'))
      expect(ctx.dataset.class).to eq(ETG::FakeGcloudClient::BigQuery::Dataset)
      expect(ctx.dataset.key).to eq('export')
    end
  end

  describe '#copy' do
    it 'copies itself' do
      original = build_context(storage_prefix: 'export/', bucket: 'export')
      duplicate = original.copy
      expect(original.storage_prefix).to eq(duplicate.storage_prefix)
      expect(original.bucket).to eq(duplicate.bucket)
    end
  end
end
795fe127943ca5d56a517b075ed065850c24a875 | 2,292 | # frozen_string_literal: true
require_relative 'choose_ability_on_or'
module Engine
module Game
module G18ZOO
module Step
class Dividend < Engine::Step::Dividend
include Engine::Game::G18ZOO::ChooseAbilityOnOr
def dividend_options(entity)
revenue = @game.routes_revenue(routes)
subsidy = @game.routes_subsidy(routes)
dividend_types.map do |type|
[type, send(type, entity, revenue, subsidy)]
end.to_h
end
def share_price_change(entity, revenue)
:right if revenue >= @game.threshold(entity)
end
def withhold(_entity, revenue, subsidy)
{
corporation: (revenue / 25.0).ceil + subsidy,
per_share: 0,
share_direction: :left,
share_times: 1,
divs_to_corporation: 0,
}
end
def payout(entity, revenue, subsidy)
{
corporation: subsidy,
per_share: payout_per_share(entity, revenue),
share_direction: revenue >= @game.threshold(entity) ? :right : nil,
share_times: 1,
divs_to_corporation: 0,
}
end
def dividends_for_entity(entity, holder, per_share)
holder.player? ? super : 0
end
def payout_per_share(entity, revenue)
@game.bonus_payout_for_share(@game.share_price_updated(entity, revenue))
end
def payout_shares(entity, revenue)
super(entity, revenue + @subsidy)
bonus = @game.bonus_payout_for_president(@game.share_price_updated(entity, revenue))
return unless bonus.positive?
@game.bank.spend(bonus, entity.player, check_positive: false)
@log << "President #{entity.player.name} earns #{@game.format_currency(bonus)}"\
" as a bonus from #{entity.name} run"
end
def process_dividend(action)
@subsidy = @game.routes_subsidy(routes)
super
@subsidy = 0
action.entity.remove_assignment!('BARREL') if @game.two_barrels_used_this_or?(action.entity)
end
end
end
end
end
end
| 29.384615 | 104 | 0.569372 |
e815373b9d06d98c560f32c7f46a3a7b9982907c | 1,303 | # coding: utf-8
lib = File.expand_path('../lib', __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require 'xeroizer/version'
# Gem packaging metadata for Xeroizer, a Ruby client library for the Xero
# accounting API.
Gem::Specification.new do |s|
  s.name = "xeroizer"
  # Version constant lives in lib/xeroizer/version.rb (loaded above).
  s.version = Xeroizer::VERSION.dup
  s.date = "2014-09-26"
  s.authors = ["Wayne Robinson"]
  s.email = "[email protected]"
  s.summary = "Xero library"
  s.description = "Ruby library for the Xero accounting system API."
  s.homepage = "http://github.com/waynerobinson/xeroizer"
  s.licenses = ["MIT"]
  # Packaged files: license, readme and the library itself; tests are
  # listed from git but not required at runtime.
  s.files = Dir["LICENSE.txt", "README.md", 'lib/**/*']
  s.test_files = `git ls-files -- test/*`.split("\n")
  s.require_paths = ["lib"]
  # Development-only dependencies (test tooling and doc generation).
  s.add_development_dependency "bundler", "~> 1.5"
  s.add_development_dependency "rake"
  s.add_development_dependency "mocha"
  s.add_development_dependency "shoulda"
  s.add_development_dependency "test-unit"
  s.add_development_dependency "rest-client"
  s.add_development_dependency "turn"
  s.add_development_dependency "ansi"
  s.add_development_dependency "redcarpet"
  s.add_development_dependency "yard"
  # Runtime dependencies.
  s.add_dependency "builder", ">= 2.1.2"
  s.add_dependency "oauth", ">= 0.4.5"
  s.add_dependency "oauth2", ">= 1.4.0"
  s.add_dependency "activesupport"
  s.add_dependency "nokogiri"
  s.add_dependency "tzinfo"
  s.add_dependency "i18n"
end
| 33.410256 | 68 | 0.714505 |
7a09d744559d368cbba27f68166f6edd62fbc9a1 | 132 | require 'spec_helper'
# Generated view-spec placeholder for users_controller/destroy.html.erb —
# intentionally marked pending until real expectations are written.
describe "users_controller/destroy.html.erb" do
  pending "add some examples to (or delete) #{__FILE__}"
end
| 22 | 56 | 0.765152 |
ed43140974a319840a7f489bac2fbcebfc2e2c38 | 1,392 | require 'perfmon/config'
module Perfmon
class Middleware
def initialize(app)
@app = app
@csv_path = Perfmon.config.csv_path
@csv_name = Perfmon.config.csv_name
end
def call(env)
# Time request came
initial_request = Time.now
request = Rack::Request.new(env)
# Request URI
request_uri = request.path
# Parameters
params = request.params.map do |key, value|
if key.class != String
key = key.to_s
end
if value.class != String
value = value.to_s
end
key + "==>" + value
end.join('; ')
# PID
pid = Process.pid
# Thread ID
tid = Thread.current.object_id
status, headers, response = @app.call(env)
response_time = Time.now
# Response Body MD%
response_md5 = Digest::MD5.hexdigest(response.body)
# Time spent
total_time = response_time - initial_request
CSV.open(File.join(@csv_path, @csv_name), "a+") do |csv|
if File.empty?(File.join(@csv_path, @csv_name))
csv << ["Initial Request Time", "Request Completed Time", "Time Taken", "Request URI", 'Request GET Params', 'PID', 'TID', 'MD5']
end
csv << [initial_request, response_time, total_time, request_uri, params, pid, tid, response_md5]
end
return status, headers, response
end
end
end
| 29 | 139 | 0.59842 |
039c591035ebe2c83de4a3709b87a51806b9751c | 1,839 | #
# The MIT License
# Copyright (c) 2018 Estonian Information System Authority (RIA),
# Nordic Institute for Interoperability Solutions (NIIS), Population Register Centre (VRK)
# Copyright (c) 2015-2017 Estonian Information System Authority (RIA), Population Register Centre (VRK)
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
#
# Authentication certificate registered for a security server. Certificates
# must be globally unique across all servers.
class AuthCert < ActiveRecord::Base
  validates_presence_of :security_server_id
  validates_uniqueness_of :cert
  belongs_to :security_server
  # NOTE(review): raises from a validation callback (rather than adding a
  # validation error) so the caller aborts with a translated message naming
  # the server that already owns the certificate — presumably intentional;
  # confirm callers expect an exception here.
  before_validation do |record|
    existing_certs = AuthCert.where(:cert => record.cert)
    unless existing_certs.empty?
      existing_cert = existing_certs[0]
      security_server = existing_cert.security_server
      raise I18n.t("errors.request.auth_cert_not_unique",
          {:server_id => security_server.get_server_id()})
    end
  end
end
| 41.795455 | 103 | 0.770527 |
33b17550a88e810dc28c7cf5f6d2817923001aad | 186 | class CreateDonors < ActiveRecord::Migration
def change
create_table :donors do |t|
t.string :name
t.string :industry
t.timestamps null: false
end
end
end
| 16.909091 | 44 | 0.66129 |
f77086a0a6f1c02fcd7473763a60ee2c3a164f2e | 6,158 | # frozen_string_literal: true
require 'spec_helper'
require 'json'
require 'bolt'
require 'bolt/result'
# Unit tests for Bolt::Result: construction defaults, exception wrapping via
# .from_exception, and the .for_command / .for_task factory methods (including
# JSON parsing, _error/_output/_sensitive handling, and invalid UTF-8 output).
describe Bolt::Result do
  let(:target) { "foo" }
  describe :initialize do
    it 'sets default values' do
      result = Bolt::Result.new(target)
      expect(result.target).to eq('foo')
      expect(result.value).to eq({})
      expect(result.action).to eq('action')
      expect(result.object).to eq(nil)
    end
    it 'sets error' do
      result = Bolt::Result.new(target, error: { 'This' => 'is an error' })
      expect(result.error_hash).to eq('This' => 'is an error')
      expect(result.value['_error']).to eq('This' => 'is an error')
    end
    it 'errors if error is not a hash' do
      expect { Bolt::Result.new(target, error: 'This is an error') }
        .to raise_error(RuntimeError, 'TODO: how did we get a string error')
    end
    it 'sets message' do
      result = Bolt::Result.new(target, message: 'This is a message')
      expect(result.message).to eq('This is a message')
      expect(result.value['_output']).to eq('This is a message')
    end
  end
  describe :from_exception do
    let(:result) do
      ex = RuntimeError.new("oops")
      ex.set_backtrace('/path/to/bolt/node.rb:42')
      Bolt::Result.from_exception(target, ex)
    end
    it 'has an error' do
      expect(result.error_hash['msg']).to eq("oops")
    end
    it 'has a target' do
      expect(result.target).to eq(target)
    end
    it 'does not have a message' do
      expect(result.message).to be_nil
    end
    it 'has an _error in value' do
      expect(result.value['_error']['msg']).to eq("oops")
    end
    it 'sets default action' do
      expect(result.action).to eq('action')
    end
    it 'sets action when specified as an argument' do
      ex = RuntimeError.new("oops")
      ex.set_backtrace('/path/to/bolt/node.rb:42')
      result = Bolt::Result.from_exception(target, ex, action: 'custom_action')
      expect(result.action).to eq('custom_action')
    end
  end
  describe :for_command do
    # NOTE: "stout"/"sterr" are intentional fixture strings, not typos to fix.
    it 'exposes value' do
      result = Bolt::Result.for_command(target, "stout", "sterr", 0, 'command', 'command', [])
      expect(result.value).to eq('stdout' => 'stout', 'stderr' => 'sterr', 'exit_code' => 0)
    end
    it 'creates errors' do
      result = Bolt::Result.for_command(target, "stout", "sterr", 1, 'command', 'command', ['/jacko/lantern', 6])
      expect(result.error_hash['kind']).to eq('puppetlabs.tasks/command-error')
      expect(result.error_hash['details']).to include({ 'file' => '/jacko/lantern', 'line' => 6 })
    end
  end
  describe :for_task do
    it 'parses json objects' do
      obj = { "key" => "val" }
      result = Bolt::Result.for_task(target, obj.to_json, '', 0, 'atask', ['/do/not/print', 8])
      expect(result.value).to eq(obj)
    end
    it 'adds an error message if _error is missing a msg' do
      obj = { '_error' => 'oops' }
      result = Bolt::Result.for_task(target, obj.to_json, '', 0, 'atask', ['/pumpkin/patch', 10])
      expect(result.error_hash['msg']).to match(/Invalid error returned from task atask/)
      expect(result.error_hash['details']).to include({ 'original_error' => 'oops',
                                                        'file' => '/pumpkin/patch',
                                                        'line' => 10 })
    end
    it 'adds kind and details to _error hash if missing' do
      obj = { '_error' => { 'msg' => 'oops' } }
      # Ensure we don't add file and line if they aren't available
      result = Bolt::Result.for_task(target, obj.to_json, '', 0, 'atask', [])
      expect(result.error_hash).to eq(
        'msg' => 'oops',
        'kind' => 'bolt/error',
        'details' => {}
      )
    end
    it 'marks _sensitive values as sensitive' do
      obj = { "user" => "someone", "_sensitive" => { "password" => "sosecretive" } }
      result = Bolt::Result.for_task(target, obj.to_json, '', 0, 'atask', [])
      expect(result.sensitive).to be_a(Puppet::Pops::Types::PSensitiveType::Sensitive)
      expect(result.sensitive.unwrap).to eq('password' => 'sosecretive')
    end
    it 'excludes _output and _error from generic_value' do
      obj = { "key" => "val" }
      special = { "_error" => { 'msg' => 'oops' }, "_output" => "output" }
      result = Bolt::Result.for_task(target, obj.merge(special).to_json, '', 0, 'atask', [])
      expect(result.generic_value).to eq(obj)
    end
    it 'includes _sensitive in generic_value' do
      obj = { "user" => "someone", "_sensitive" => { "password" => "sosecretive" } }
      result = Bolt::Result.for_task(target, obj.to_json, '', 0, 'atask', [])
      expect(result.generic_value.keys).to include('user', '_sensitive')
    end
    it "doesn't parse arrays" do
      stdout = '[1, 2, 3]'
      result = Bolt::Result.for_task(target, stdout, '', 0, 'atask', [])
      expect(result.value).to eq('_output' => stdout)
    end
    it 'handles errors' do
      obj = { "key" => "val",
              "_error" => { "msg" => "oops", "kind" => "error", "details" => {} } }
      result = Bolt::Result.for_task(target, obj.to_json, '', 1, 'atask', [])
      expect(result.value).to eq(obj)
      expect(result.error_hash).to eq(obj['_error'])
    end
    it 'uses the unparsed value of stdout if it is not valid JSON' do
      stdout = 'just some string'
      result = Bolt::Result.for_task(target, stdout, '', 0, 'atask', [])
      expect(result.value).to eq('_output' => 'just some string')
    end
    # Raw bytes that are not valid UTF-8; must be reported as an error.
    it 'generates an error for binary data' do
      stdout = "\xFC].\xF9\xA8\x85f\xDF{\x11d\xD5\x8E\xC6\xA6"
      result = Bolt::Result.for_task(target, stdout, '', 0, 'atask', [])
      expect(result.value.keys).to eq(['_error'])
      expect(result.error_hash['msg']).to match(/The task result contained invalid UTF-8/)
    end
    it 'generates an error for non-UTF-8 output' do
      stdout = "☃".encode('utf-32')
      result = Bolt::Result.for_task(target, stdout, '', 0, 'atask', [])
      expect(result.value.keys).to eq(['_error'])
      expect(result.error_hash['msg']).to match(/The task result contained invalid UTF-8/)
    end
  end
end
| 36.654762 | 113 | 0.596297 |
d57ba556c55020d13d58a79a2b0af84873be7c37 | 4,430 | require 'test_helper'
require 'pd/jot_form/matrix_question'
module Pd
  module JotForm
    # Tests for MatrixQuestion: parsing raw JotForm matrix data, mapping
    # answers to sub-question indices, summarizing, and hash round-tripping.
    class MatrixQuestionTest < ActiveSupport::TestCase
      include Constants
      test 'parse jotform question data for matrix' do
        jotform_question = {
          qid: '1',
          type: TYPE_MATRIX,
          name: 'sampleMatrix',
          text: 'This is a matrix label',
          order: '1',
          mcolumns: 'Strongly Agree|Agree|Neutral|Disagree|Strongly Disagree',
          mrows: 'Question 1|Question 2'
        }.stringify_keys
        question = MatrixQuestion.from_jotform_question jotform_question
        assert question.is_a? MatrixQuestion
        assert_equal 1, question.id
        assert_equal TYPE_MATRIX, question.type
        assert_equal 'sampleMatrix', question.name
        assert_equal 'This is a matrix label', question.text
        assert_equal 1, question.order
        assert_equal ANSWER_SINGLE_SELECT, question.answer_type
        assert_equal ['Strongly Agree', 'Agree', 'Neutral', 'Disagree', 'Strongly Disagree'], question.options
        assert_equal ['Question 1', 'Question 2'], question.sub_questions
      end
      # get_value maps sub-question text to its index, keeping chosen options.
      test 'get_value' do
        question = MatrixQuestion.new(
          id: 1,
          options: %w(Agree Neutral Disagree),
          sub_questions: ['Question 1', 'Question 2', 'Question 3']
        )
        answer = {
          'Question 1' => 'Neutral',
          'Question 2' => '', # blank answer should be ignored
          'Question 3' => 'Agree'
        }
        assert_equal(
          {0 => 'Neutral', 2 => 'Agree'},
          question.get_value(answer)
        )
      end
      test 'get_value errors' do
        question = MatrixQuestion.new(
          id: 1,
          options: %w(Agree Neutral Disagree),
          sub_questions: ['Question 1']
        )
        e = assert_raises do
          question.get_value({'Nonexistent Question' => 'Agree'})
        end
        assert_equal "Unable to find sub-question 'Nonexistent Question' in matrix question 1", e.message
        e = assert_raises do
          question.get_value('Question 1' => 'Nonexistent Answer')
        end
        assert_equal "Unable to find 'Nonexistent Answer' in the options for matrix question 1", e.message
      end
      # Each sub-question summarizes as "<name>_<index>" with the matrix text
      # prepended to the sub-question text.
      test 'summarize' do
        question = MatrixQuestion.new(
          id: 1,
          name: 'sampleMatrix',
          text: 'How much do you agree or disagree with the following statements about this workshop?',
          options: %w(Disagree Neutral Agree),
          sub_questions: [
            'I learned something',
            'It was a good use of time'
          ]
        )
        expected_summary = {
          'sampleMatrix_0' => {
            text: 'How much do you agree or disagree with the following statements about this workshop? I learned something',
            answer_type: ANSWER_SINGLE_SELECT,
            options: %w(Disagree Neutral Agree),
            parent: 'sampleMatrix',
            max_value: 3
          },
          'sampleMatrix_1' => {
            text: 'How much do you agree or disagree with the following statements about this workshop? It was a good use of time',
            answer_type: ANSWER_SINGLE_SELECT,
            options: %w(Disagree Neutral Agree),
            parent: 'sampleMatrix',
            max_value: 3
          }
        }
        assert_equal expected_summary, question.summarize
      end
      test 'process_answer' do
        question = MatrixQuestion.new(
          id: 1,
          name: 'sampleMatrix',
          options: %w(Disagree Neutral Agree),
          sub_questions: [
            'I learned something',
            'It was a good use of time'
          ],
        )
        answer = {
          'I learned something' => 'Agree',
          'It was a good use of time' => 'Neutral'
        }
        assert_equal(
          {
            'sampleMatrix_0' => 'Agree',
            'sampleMatrix_1' => 'Neutral'
          },
          question.process_answer(answer)
        )
      end
      test 'to hash and back' do
        hash = {
          id: 1,
          type: TYPE_MATRIX,
          name: 'a name',
          text: 'label',
          order: 1,
          options: %w(One Two Three),
          sub_questions: ['Question 1', 'Question 2']
        }
        question = MatrixQuestion.new(hash)
        assert_equal hash, question.to_h
      end
    end
  end
end
| 30.979021 | 131 | 0.56772 |
33b56b4093b9f0f062fb6e14e6a5d310ea7d6c8c | 4,958 | # -*- coding: utf-8 -*-
defined? PROJECT or abort "PROJECT required."
defined? RELEASE or abort "RELEASE required."
defined? COPYRIGHT or abort "COPYRIGHT required."
defined? LICENSE or abort "LICENSE required."
RELEASE =~ /\A\d+\.\d+\.\d+/ or abort "RELEASE=#{RELEASE}: invalid release number."
$ruby_versions ||= %w[2.4 2.5 2.6 2.7 3.0]
require 'rake/clean'
CLEAN << "build"
CLEAN.concat Dir.glob("#{PROJECT}-*.gem").collect {|x| x.sub(/\.gem$/, '') }
CLOBBER.concat Dir.glob("#{PROJECT}-*.gem")
desc "show release guide"
# Prints a step-by-step release checklist with PROJECT/RELEASE interpolated.
task :guide do
  RELEASE != '0.0.0' or abort "rake help: required 'RELEASE=X.X.X'"
  rel, proj = RELEASE, PROJECT
  # NOTE(review): `branch` (and the regexp match feeding $1) is never used
  # below — dead code candidate; confirm before removing.
  rel =~ /(\d+\.\d+)/
  branch = "#{proj}_rel-#{$1}"
  puts <<END
How to release:
$ git diff .
$ git status .
$ which ruby
$ rake test
$ rake test:all
$ rake readme:execute # optional
$ rake readme:toc # optional
$ rake package RELEASE=#{rel}
$ rake package:extract # confirm files in gem file
$ (cd #{proj}-#{rel}/data; find . -type f)
$ gem install #{proj}-#{rel}.gem # confirm gem package
$ gem uninstall #{proj}
$ gem push #{proj}-#{rel}.gem # publish gem to rubygems.org
$ git tag #{proj}-#{rel}
$ git push
$ git push --tags
$ rake clean
END
end unless Rake::Task.task_defined?(:guide)
desc "do test"
# Runs the whole suite via the test runner script with the current ruby.
task :test do
  ruby "test/run_all.rb"
end unless Rake::Task.task_defined?(:test)
# Multi-version test task: for each version prefix in $ruby_versions, run the
# suite with the newest matching ruby found under $VS_HOME/ruby/.
if ENV['VS_HOME'] && $ruby_versions
  desc "do test for different ruby versions"
  task :'test:all' do
    vs_home = ENV['VS_HOME'].split(/:/).first
    ENV['TC_QUIET'] = "Y" if File.exist?("test/tc.rb")
    # Sort key: the numeric dot-separated components of each candidate path,
    # so e.g. ".../2.4.10" outranks ".../2.4.9".
    # Fix: the previous code passed a two-argument comparator proc to
    # #sort_by, whose block receives a single element — the second argument
    # was always nil, so every key compared equal and the "newest" pick was
    # effectively arbitrary.
    version_key = proc {|x| x.to_s.split('.').map(&:to_i) }
    $ruby_versions.each do |ver|
      dir = Dir.glob("#{vs_home}/ruby/#{ver}.*").sort_by(&version_key).last
      next unless dir
      puts "==== ruby #{ver} (#{dir}) ===="
      # Report but do not abort on a failing suite, so all versions still run.
      sh "#{dir}/bin/ruby test/run_all.rb" do |ok, res|
        $stderr.puts "** test failed" unless ok
      end
    end
  end unless Rake::Task.task_defined?(:'test:all')
end
# Returns the list of files declared in "#{PROJECT}.gemspec" (spec.files),
# memoized in $_target_files. Aborts when the gemspec's name differs from
# PROJECT. NOTE: evaluates the gemspec with `eval` — acceptable only because
# the file is part of this repository (trusted input).
def target_files()
  $_target_files ||= begin
    spec_src = File.read("#{PROJECT}.gemspec", encoding: 'utf-8')
    spec = eval spec_src
    spec.name == PROJECT or
      abort "'#{PROJECT}' != '#{spec.name}' (project name in gemspec file)"
    spec.files
  end
  return $_target_files
end
# Opens +filename+ in binary read/write mode, yields its entire content to
# the block, and writes the block's return value back only when it differs
# from what was read. Returns true when the file was rewritten, false when
# it was left untouched.
def edit_file(filename)
  File.open(filename, 'rb+') do |file|
    before = file.read
    after = yield(before)
    if after == before
      false
    else
      file.rewind
      file.truncate(0)
      file.write(after)
      true
    end
  end
end
desc "edit metadata in files"
# Rewrites $Release$/$Copyright$/$License$ placeholders in every file listed
# by the gemspec, printing "[C] file" for changed and "[U] file" for
# unchanged files.
task :edit do
  target_files().each do |fname|
    changed = edit_file(fname) do |s|
      # Fix: this line previously read `if RELE ASE != '0.0.0'` — a stray
      # space split the RELEASE constant and made the task raise at runtime.
      s = s.gsub(/\$Release[:].*?\$/, "$"+"Release: #{RELEASE} $") if RELEASE != '0.0.0'
      s = s.gsub(/\$Copyright[:].*?\$/, "$"+"Copyright: #{COPYRIGHT} $")
      s = s.gsub(/\$License[:].*?\$/, "$"+"License: #{LICENSE} $")
      s
    end
    puts "[C] #{fname}" if changed
    puts "[U] #{fname}" unless changed
  end
end unless Rake::Task.task_defined?(:edit)
desc "create package (*.gem)"
# Builds the gem in a throwaway ./build directory: copy the gemspec's file
# list, substitute the metadata placeholders, run `gem build`, then move the
# resulting .gem back to the project root and delete the build directory.
task :package do
  RELEASE != '0.0.0' or abort "rake help: required 'RELEASE=X.X.X'"
  ## copy
  dir = "build"
  rm_rf dir if File.exist?(dir)
  mkdir dir
  target_files().each do |file|
    dest = File.join(dir, File.dirname(file))
    mkdir_p dest, :verbose=>false unless File.exist?(dest)
    cp file, "#{dir}/#{file}"
  end
  ## edit — replace $Release$/$Copyright$/$License$ markers in the copies only
  Dir.glob("#{dir}/**/*").each do |file|
    next unless File.file?(file)
    edit_file(file) do |s|
      s = s.gsub(/\$Release[:].*?\$/, "$"+"Release: #{RELEASE} $")
      s = s.gsub(/\$Copyright[:].*?\$/, "$"+"Copyright: #{COPYRIGHT} $")
      s = s.gsub(/\$License[:].*?\$/, "$"+"License: #{LICENSE} $")
      s
    end
  end
  ## build
  chdir dir do
    sh "gem build #{PROJECT}.gemspec"
  end
  mv "#{dir}/#{PROJECT}-#{RELEASE}.gem", "."
  rm_rf dir
end unless Rake::Task.task_defined?(:package)
desc "extract latest gem file"
# Unpacks the most recently modified PROJECT-*.gem into a same-named
# directory (gem layout: outer tar containing data.tar.gz, extracted into
# <dir>/data) so the packaged files can be inspected before publishing.
task :'package:extract' do
  gemfile = Dir.glob("#{PROJECT}-*.gem").sort_by {|x| File.mtime(x) }.last
  dir = gemfile.sub(/\.gem$/, '')
  rm_rf dir if File.exist?(dir)
  mkdir dir
  mkdir "#{dir}/data"
  cd dir do
    sh "tar xvf ../#{gemfile}"
    sh "gunzip *.gz"
    cd "data" do
      sh "tar xvf ../data.tar"
    end
  end
end unless Rake::Task.task_defined?(:'package:extract')
desc "upload gem file to rubygems.org"
# Pushes the built gem after an interactive y/N confirmation and tags the
# release in git.
task :publish do
  RELEASE != '0.0.0' or abort "rake help: required 'RELEASE=X.X.X'"
  gemfile = "#{PROJECT}-#{RELEASE}.gem"
  print "** Are you sure to publish #{gemfile}? [y/N]: "
  answer = $stdin.gets().strip()
  if answer.downcase == "y"
    sh "gem push #{gemfile}"
    sh "git tag ruby-#{PROJECT}-#{RELEASE}"
    # NOTE(review): the leading '#' makes this a shell comment, so the push
    # of tags is effectively disabled — confirm whether that is intentional.
    sh "#git push --tags"
  end
end unless Rake::Task.task_defined?(:publish)
# `desc nil` keeps this maintenance task out of `rake -T` output.
desc nil
# Re-creates hard links for task/*.rb pointing at the copies one directory
# up; files that already share the same inode are left alone.
task :'relink' do
  Dir.glob("task/*.rb").each do |x|
    src = "../" + x
    next if File.identical?(src, x)
    rm x
    ln src, x
  end
end
| 26.513369 | 91 | 0.585317 |
acba1a5a2bee9c0f0fe165201c3d2c37a39c7379 | 5,567 | # Copyright (c) 2016, 2021, Oracle and/or its affiliates. All rights reserved.
# This software is dual-licensed to you under the Universal Permissive License (UPL) 1.0 as shown at https://oss.oracle.com/licenses/upl or Apache License 2.0 as shown at http://www.apache.org/licenses/LICENSE-2.0. You may choose either license.
require 'date'
# rubocop:disable Lint/UnneededCopDisableDirective, Metrics/LineLength
module OCI
  # The connection name/value pair.
  # NOTE(review): this model follows the SDK generator pattern
  # (attribute_map/swagger_types/build_from_hash) — keep edits minimal, as
  # the file is likely regenerated.
  class DataIntegration::Models::ConnectionProperty
    # Free form text without any restriction on permitted characters. Name can have letters, numbers, and special characters. The value is editable and is restricted to 1000 characters.
    # @return [String]
    attr_accessor :name
    # The value for the connection name property.
    # @return [String]
    attr_accessor :value
    # Attribute mapping from ruby-style variable name to JSON key.
    def self.attribute_map
      {
        # rubocop:disable Style/SymbolLiteral
        'name': :'name',
        'value': :'value'
        # rubocop:enable Style/SymbolLiteral
      }
    end
    # Attribute type mapping.
    def self.swagger_types
      {
        # rubocop:disable Style/SymbolLiteral
        'name': :'String',
        'value': :'String'
        # rubocop:enable Style/SymbolLiteral
      }
    end
    # rubocop:disable Metrics/CyclomaticComplexity, Metrics/AbcSize, Metrics/PerceivedComplexity
    # rubocop:disable Metrics/MethodLength, Layout/EmptyLines, Style/SymbolLiteral
    # Initializes the object
    # @param [Hash] attributes Model attributes in the form of hash
    # @option attributes [String] :name The value to assign to the {#name} property
    # @option attributes [String] :value The value to assign to the {#value} property
    def initialize(attributes = {})
      return unless attributes.is_a?(Hash)
      # convert string to symbol for hash key
      attributes = attributes.each_with_object({}) { |(k, v), h| h[k.to_sym] = v }
      self.name = attributes[:'name'] if attributes[:'name']
      self.value = attributes[:'value'] if attributes[:'value']
    end
    # rubocop:enable Metrics/CyclomaticComplexity, Metrics/AbcSize, Metrics/PerceivedComplexity
    # rubocop:enable Metrics/MethodLength, Layout/EmptyLines, Style/SymbolLiteral
    # rubocop:disable Metrics/CyclomaticComplexity, Metrics/AbcSize, Metrics/PerceivedComplexity, Layout/EmptyLines
    # Checks equality by comparing each attribute.
    # @param [Object] other the other object to be compared
    def ==(other)
      return true if equal?(other)
      self.class == other.class &&
        name == other.name &&
        value == other.value
    end
    # rubocop:enable Metrics/CyclomaticComplexity, Metrics/AbcSize, Metrics/PerceivedComplexity, Layout/EmptyLines
    # @see the `==` method
    # @param [Object] other the other object to be compared
    def eql?(other)
      self == other
    end
    # rubocop:disable Metrics/AbcSize, Layout/EmptyLines
    # Calculates hash code according to all attributes.
    # @return [Fixnum] Hash code
    def hash
      [name, value].hash
    end
    # rubocop:enable Metrics/AbcSize, Layout/EmptyLines
    # rubocop:disable Metrics/AbcSize, Layout/EmptyLines
    # Builds the object from hash
    # @param [Hash] attributes Model attributes in the form of hash
    # @return [Object] Returns the model itself
    def build_from_hash(attributes)
      return nil unless attributes.is_a?(Hash)
      self.class.swagger_types.each_pair do |key, type|
        if type =~ /^Array<(.*)>/i
          # check to ensure the input is an array given that the attribute
          # is documented as an array but the input is not
          if attributes[self.class.attribute_map[key]].is_a?(Array)
            public_method("#{key}=").call(
              attributes[self.class.attribute_map[key]]
                .map { |v| OCI::Internal::Util.convert_to_type(Regexp.last_match(1), v) }
            )
          end
        elsif !attributes[self.class.attribute_map[key]].nil?
          public_method("#{key}=").call(
            OCI::Internal::Util.convert_to_type(type, attributes[self.class.attribute_map[key]])
          )
        end
        # or else data not found in attributes(hash), not an issue as the data can be optional
      end
      self
    end
    # rubocop:enable Metrics/AbcSize, Layout/EmptyLines
    # Returns the string representation of the object
    # @return [String] String presentation of the object
    def to_s
      to_hash.to_s
    end
    # Returns the object in the form of hash
    # @return [Hash] Returns the object in the form of hash
    def to_hash
      hash = {}
      self.class.attribute_map.each_pair do |attr, param|
        value = public_method(attr).call
        next if value.nil? && !instance_variable_defined?("@#{attr}")
        hash[param] = _to_hash(value)
      end
      hash
    end
    private
    # Outputs non-array value in the form of hash
    # For object, use to_hash. Otherwise, just return the value
    # @param [Object] value Any valid value
    # @return [Hash] Returns the value in the form of hash
    def _to_hash(value)
      if value.is_a?(Array)
        value.compact.map { |v| _to_hash(v) }
      elsif value.is_a?(Hash)
        {}.tap do |hash|
          value.each { |k, v| hash[k] = _to_hash(v) }
        end
      elsif value.respond_to? :to_hash
        value.to_hash
      else
        value
      end
    end
  end
end
# rubocop:enable Lint/UnneededCopDisableDirective, Metrics/LineLength
| 34.57764 | 245 | 0.666427 |
d56d9062bc76beb666b318d2ce911ad0c8c79dad | 46 | module Jupiter
VERSION = '2.3.1'.freeze
end
| 11.5 | 26 | 0.695652 |
e9918d7f6ca63621bdcf1bca6cdf54ffd1bb9b3f | 563 | require File.dirname(__FILE__) + '/models/locale'
require File.dirname(__FILE__) + '/models/translation'
require File.dirname(__FILE__) + '/models/translation_option'
require File.dirname(__FILE__) + '/routing'
require File.dirname(__FILE__) + '/controllers/locales_controller'
require File.dirname(__FILE__) + '/controllers/translations_controller'
require File.dirname(__FILE__) + '/i18n_backend_database/database'
require File.dirname(__FILE__) + '/ext/i18n'
# Mix the plugin's Routing module into Rails' routing mapper class.
ActionController::Routing::RouteSet::Mapper.send(:include, I18n::BackendDatabase::Routing)
| 56.3 | 92 | 0.783304 |
03e696576125c74bcced0acc1a0ad0acf674883a | 597 | require_relative 'boot'
require 'rails/all'
# Require the gems listed in Gemfile, including any gems
# you've limited to :test, :development, or :production.
Bundler.require(*Rails.groups)
# Application namespace; Rails configuration entry point for the app.
module Fakeinstag
  class Application < Rails::Application
    # Initialize configuration defaults for originally generated Rails version.
    config.load_defaults 5.1
    # Settings in config/environments/* take precedence over those specified here.
    # Application configuration should go into files in config/initializers
    # -- all .rb files in that directory are automatically loaded.
  end
end
| 31.421053 | 82 | 0.767169 |
33d6a4cf90e1ac30980a31ff906e70b26e630cd5 | 1,016 | # frozen_string_literal: true
# Copyright 2021 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Auto-generated by gapic-generator-ruby. DO NOT EDIT!
require "gapic/rest"
require "google/cloud/compute/v1/disk_types/rest/grpc_transcoding"
require "google/cloud/compute/v1/disk_types/rest/client"
module Google
  module Cloud
    module Compute
      module V1
        module DiskTypes
          # Client for the REST transport
          # (Generated namespace scaffolding only; the actual client and
          # transcoding implementations are loaded by the requires above.)
          module Rest
          end
        end
      end
    end
  end
end
| 28.222222 | 74 | 0.726378 |
626f49c2d3731b601ebe0f89c5d0dfe4a0279eaa | 200 | $:.push File.expand_path('../', __FILE__)
require 'plugins/testflight'
require 'plugins/hockeyapp'
require 'plugins/ftp'
require 'plugins/s3'
require 'commands/build'
require 'commands/distribute'
| 18.181818 | 41 | 0.76 |
01e8d9173ff1079a82b43586465bb838f3077d18 | 484 | require "ruby_poker/version"
require "active_support/all"
require "rainbow"
Dir[__dir__ + "/ruby_poker/*.rb"].each { |p| require p }
# Top-level namespace for the poker engine: card constants plus the
# RubyPoker.play entry point.
module RubyPoker
  # Card ranks: ace (1) first, then king down to two (13..2 reversed).
  NUMBERS = ([1] + [*2..13].reverse).freeze
  SUITS = %i[spade heart diamond club].freeze
  # Hand categories listed strongest to weakest.
  # NOTE(review): presumably the array index encodes hand ranking — confirm
  # against RubyPoker::Game (defined elsewhere).
  HAND_TYPES = %i[
    royal_straight_flush
    straight_flush
    four_of_a_kind
    full_house
    flush
    straight
    three_of_a_kind
    two_pair
    one_pair
    high_card
  ].freeze
  # Convenience entry point; delegates to RubyPoker::Game.play.
  def self.play
    RubyPoker::Game.play
  end
end
| 18.615385 | 56 | 0.683884 |
e988fffcb95bc766158474555ae3836f7b90fddf | 1,768 | require "abstract_unit"
require "active_job"
require "mailers/params_mailer"
# Tests for parameterized mailers (Mailer.with(...)): header interpolation
# from params, deliver_later job arguments, and respond_to?/method behavior
# of the parameterized proxy.
class ParameterizedTest < ActiveSupport::TestCase
  include ActiveJob::TestHelper
  setup do
    # Silence job logging and force the :test delivery method; originals are
    # saved so teardown can restore global state for other test files.
    @previous_logger = ActiveJob::Base.logger
    ActiveJob::Base.logger = Logger.new(nil)
    @previous_delivery_method = ActionMailer::Base.delivery_method
    ActionMailer::Base.delivery_method = :test
    @previous_deliver_later_queue_name = ActionMailer::Base.deliver_later_queue_name
    ActionMailer::Base.deliver_later_queue_name = :test_queue
    @mail = ParamsMailer.with(inviter: "[email protected]", invitee: "[email protected]").invitation
  end
  teardown do
    ActiveJob::Base.logger = @previous_logger
    ParamsMailer.deliveries.clear
    ActionMailer::Base.delivery_method = @previous_delivery_method
    ActionMailer::Base.deliver_later_queue_name = @previous_deliver_later_queue_name
  end
  test "parameterized headers" do
    assert_equal(["[email protected]"], @mail.to)
    assert_equal(["[email protected]"], @mail.from)
    assert_equal("So says [email protected]", @mail.body.encoded)
  end
  test "enqueue the email with params" do
    assert_performed_with(job: ActionMailer::Parameterized::DeliveryJob, args: ["ParamsMailer", "invitation", "deliver_now", { inviter: "[email protected]", invitee: "[email protected]" } ]) do
      @mail.deliver_later
    end
  end
  test "respond_to?" do
    mailer = ParamsMailer.with(inviter: "[email protected]", invitee: "[email protected]")
    assert_respond_to mailer, :invitation
    assert_not_respond_to mailer, :anything
    invitation = mailer.method(:invitation)
    assert_equal Method, invitation.class
    # Asking for a Method object of an unknown action must raise NameError.
    assert_raises(NameError) do
      invitation = mailer.method(:anything)
    end
  end
end
| 32.145455 | 195 | 0.75 |
1a53feaec4705d8feba96b5e5830d4bc88884c66 | 1,576 | CookieClickerGenerator::Application.configure do
# Settings specified here will take precedence over those in config/application.rb.
# The test environment is used exclusively to run your application's
# test suite. You never need to work with it otherwise. Remember that
# your test database is "scratch space" for the test suite and is wiped
# and recreated between test runs. Don't rely on the data there!
config.cache_classes = true
# Do not eager load code on boot. This avoids loading your whole application
# just for the purpose of running a single test. If you are using a tool that
# preloads Rails for running tests, you may have to set it to true.
config.eager_load = false
# Configure static asset server for tests with Cache-Control for performance.
config.serve_static_assets = true
config.static_cache_control = "public, max-age=3600"
# Show full error reports and disable caching.
config.consider_all_requests_local = true
config.action_controller.perform_caching = false
# Raise exceptions instead of rendering exception templates.
config.action_dispatch.show_exceptions = false
# Disable request forgery protection in test environment.
config.action_controller.allow_forgery_protection = false
# Tell Action Mailer not to deliver emails to the real world.
# The :test delivery method accumulates sent emails in the
# ActionMailer::Base.deliveries array.
config.action_mailer.delivery_method = :test
# Print deprecation notices to the stderr.
config.active_support.deprecation = :stderr
end
| 42.594595 | 85 | 0.77665 |
28b292d13729aa25173e378d767a9e6994e7e90c | 278 | class Seed < ActiveRecord::Base
store :payload, accessors: [:uri, :cache, :headers, :delimiter, :server, :host, :port, :database, :username, :password, :query, :selectors, :checked]
store :memory
has_many :seed_mapping
has_many :agents, :through => :seed_mapping
end | 34.75 | 152 | 0.705036 |
ed5be7961f80a09fcadabcacfa3370e992a03fbd | 301 | begin
require 'rspec'
rescue LoadError
require 'rubygems'
gem 'rspec'
require 'rspec'
end
$:.unshift(File.dirname(__FILE__) + '/../lib')
$:.unshift(File.dirname(__FILE__) + '/../lib/hudson_erb')
require 'hudson_erb'
require 'mocha'
# Use mocha (rather than rspec-mocks) as the mocking framework for this suite.
RSpec.configure do |config|
  config.mock_with :mocha
end
| 16.722222 | 57 | 0.704319 |
1a8374afb894aa8f2220f71a007c27c6d26ac89b | 1,193 | require File.dirname(__FILE__) + '/spec_helper.rb'
# Specs for Rhino::ColumnFamily: listing column names (short and fully
# qualified) under a family, including names containing extra colons.
describe Rhino::ColumnFamily do
  before do
    @page = Page.new('yahoo.com', :title=>"Yahoo!", :contents=>"<p>yahoo</p>", :meta_author=>'filo & yang', :meta_language=>'en-US')
  end
  # Placeholder spec — intentionally pending.
  it "should structure columns properly even before saving them to the db" do
    pending
  end
  it "should present a ColumnFamily object" do
    @page.meta_family.class.should == Rhino::ColumnFamily
  end
  it "should list the columns underneath a column family" do
    @page.meta_column_names.sort.should == %w(author language)
  end
  it "should list the columns' full names underneath a column family" do
    @page.meta_family.column_full_names.sort.should == %w(meta:author meta:language)
  end
  it "should determine column names" do
    @page.meta_family.column_names.sort.should == %w(author language)
  end
  it "should determine column names in a family correctly when the column names contain extra colons" do
    @page.set_attribute('links:https://com.google/', 'link text')
    @page.links_family.column_names.should == %w(https://com.google/)
  end
  # Empty spec body — documents an intended behavior with no assertions yet.
  it "should be able to initialize using a have_one association" do
  end
end
| 32.243243 | 132 | 0.715842 |
e8a14773b7d78fb2a9e6189bcf2ffee5c9b3d01e | 605 | require './viewer'
Zootrope.new.trace
# A ball that can be tossed: tossing hands the ball itself to the catcher
# via the catcher's #catch_ball hook.
class Baseball
  def toss(catcher)
    catcher.catch_ball(self)
  end
end
# A participant that can hold at most one ball: it can catch one, toss the
# held ball to another player, and report whether it currently holds one.
class Player
  attr_accessor :ball
  # Take possession of +incoming+ (stored in @ball).
  def catch_ball(incoming)
    @ball = incoming
  end
  # Delegate the throw to the held ball, then release possession.
  def toss(receiver)
    @ball.toss(receiver)
    @ball = nil
  end
  # True while a ball is held, false otherwise.
  def has_ball?
    @ball ? true : false
  end
end
# Wires a line of players to a fresh Baseball and relays it down the line.
class BallGame
  attr_reader :players, :ball
  def initialize(*players)
    @players = players
    @ball = Baseball.new
  end
  # Hands the ball to the first player, then each player tosses it to the
  # next until the last player holds it. Always returns true.
  def play
    players.first.ball = ball
    players.each_cons(2) { |thrower, catcher| thrower.toss(catcher) }
    true
  end
end
| 12.87234 | 37 | 0.639669 |
6af77b77fdb952038ec4c3f020ffb7ee8b206fcf | 360 | # typed: false
# Replaces the parent/child column pair on folder_contents with a single
# folder_id reference.
class RemoveParentChildAndAddFolderFromFolderContent < ActiveRecord::Migration
  def change
    begin
      add_column :folder_contents, :folder_id, :integer
    rescue
      # NOTE(review): a bare rescue swallows any StandardError, not only a
      # "column already exists" failure — consider column_exists? instead.
      puts "Column folder_id already added to folder_contents"
    end
    remove_column :folder_contents, :parent_id
    remove_column :folder_contents, :child_id
  end
end
| 27.692308 | 78 | 0.758333 |
f85a4c690b9781eac62bb94936361b68ed3fe0a0 | 1,440 | class RancherCli < Formula
desc "Unified tool to manage your Rancher server"
homepage "https://github.com/rancher/cli"
url "https://github.com/rancher/cli/archive/v2.4.12.tar.gz"
sha256 "7fb2fb7d8e210198b0829de169ccec1ba882c872ec8cd28ae905ab4460e94fc7"
license "Apache-2.0"
head "https://github.com/rancher/cli.git", branch: "master"
livecheck do
url :stable
regex(/^v?(\d+(?:\.\d+)+)$/i)
end
bottle do
sha256 cellar: :any_skip_relocation, arm64_big_sur: "81f0d67c3cc237d67246ed1e0b9f74c25365e907e45f272fa642d012170657df"
sha256 cellar: :any_skip_relocation, big_sur: "6579774482291cf6fc8bb1bdae68335e248e4ce2a35c858645f1d7e86164731e"
sha256 cellar: :any_skip_relocation, catalina: "02ad8e0cf289919361a10c7021d8b730d447753a2a2aa5ed2605da10047021ca"
sha256 cellar: :any_skip_relocation, mojave: "3241f896a3d9d31be2053ccbdfccb1d6e817318c65a6c17008dd0042f37de2ad"
sha256 cellar: :any_skip_relocation, x86_64_linux: "5a7812410b2f76b8d5a9df9cefb53a8b0db2feb37de6fce3758416a6aab79d9e"
end
depends_on "go" => :build
def install
system "go", "build", *std_go_args(ldflags: "-s -w -X main.VERSION=#{version}"), "-o", bin/"rancher"
end
test do
assert_match "Failed to parse SERVERURL", shell_output("#{bin}/rancher login localhost -t foo 2>&1", 1)
assert_match "invalid token", shell_output("#{bin}/rancher login https://127.0.0.1 -t foo 2>&1", 1)
end
end
| 43.636364 | 122 | 0.750694 |
1a6513984787bb3091f09d434a3670b79ab911b5 | 1,278 | # frozen_string_literal: true
# Copyright 2021 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Auto-generated by gapic-generator-ruby. DO NOT EDIT!
# [START container_v1_generated_ClusterManager_GetServerConfig_sync]
require "google/cloud/container/v1"
# Create a client object. The client can be reused for multiple calls.
client = Google::Cloud::Container::V1::ClusterManager::Client.new
# Create a request. To set request fields, pass in keyword arguments.
request = Google::Cloud::Container::V1::GetServerConfigRequest.new
# Call the get_server_config method.
result = client.get_server_config request
# The returned object is of type Google::Cloud::Container::V1::ServerConfig.
p result
# [END container_v1_generated_ClusterManager_GetServerConfig_sync]
| 37.588235 | 76 | 0.786385 |
28a19d338e91318c3a48c1b38be855c82adff71f | 13,675 | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# EDITING INSTRUCTIONS
# This file was generated from the file
# https://github.com/googleapis/googleapis/blob/master/google/ads/google_ads/v1/services/geo_target_constant_service.proto,
# and updates to that file get reflected here through a refresh process.
# For the short term, the refresh process will only be runnable by Google
# engineers.
require "json"
require "pathname"
require "google/gax"
require "google/ads/google_ads/v1/services/geo_target_constant_service_pb"
require "google/ads/google_ads/v1/services/credentials"
module Google
module Ads
module GoogleAds
module V1
module Services
# Service to fetch geo target constants.
#
# @!attribute [r] geo_target_constant_service_stub
# @return [Google::Ads::GoogleAds::V1::Services::GeoTargetConstantService::Stub]
class GeoTargetConstantServiceClient
attr_reader :geo_target_constant_service_stub
# The default address of the service.
SERVICE_ADDRESS = "googleads.googleapis.com".freeze
# The default port of the service.
DEFAULT_SERVICE_PORT = 443
# The default set of gRPC interceptors.
GRPC_INTERCEPTORS = []
DEFAULT_TIMEOUT = 30
# The scopes needed to make gRPC calls to all of the methods defined in
# this service.
ALL_SCOPES = [
].freeze
GEO_TARGET_CONSTANT_PATH_TEMPLATE = Google::Gax::PathTemplate.new(
"geoTargetConstants/{geo_target_constant}"
)
private_constant :GEO_TARGET_CONSTANT_PATH_TEMPLATE
# Builds the fully-qualified resource name for a geo target constant,
# i.e. +geoTargetConstants/{geo_target_constant}+.
#
# @param geo_target_constant [String]
# @return [String]
def self.geo_target_constant_path geo_target_constant
  GEO_TARGET_CONSTANT_PATH_TEMPLATE.render(geo_target_constant: geo_target_constant)
end
# @param credentials [Google::Auth::Credentials, String, Hash, GRPC::Core::Channel, GRPC::Core::ChannelCredentials, Proc]
# Provides the means for authenticating requests made by the client. This parameter can
# be many types.
# A `Google::Auth::Credentials` uses a the properties of its represented keyfile for
# authenticating requests made by this client.
# A `String` will be treated as the path to the keyfile to be used for the construction of
# credentials for this client.
# A `Hash` will be treated as the contents of a keyfile to be used for the construction of
# credentials for this client.
# A `GRPC::Core::Channel` will be used to make calls through.
# A `GRPC::Core::ChannelCredentials` for the setting up the RPC client. The channel credentials
# should already be composed with a `GRPC::Core::CallCredentials` object.
# A `Proc` will be used as an updater_proc for the Grpc channel. The proc transforms the
# metadata for requests, generally, to give OAuth credentials.
# @param scopes [Array<String>]
# The OAuth scopes for this service. This parameter is ignored if
# an updater_proc is supplied.
# @param client_config [Hash]
# A Hash for call options for each method. See
# Google::Gax#construct_settings for the structure of
# this data. Falls back to the default config if not specified
# or the specified config is missing data points.
# @param timeout [Numeric]
# The default timeout, in seconds, for calls made through this client.
# @param metadata [Hash]
# Default metadata to be sent with each request. This can be overridden on a per call basis.
# @param exception_transformer [Proc]
# An optional proc that intercepts any exceptions raised during an API call to inject
# custom error handling.
def initialize \
    credentials: nil,
    scopes: ALL_SCOPES,
    client_config: {},
    timeout: DEFAULT_TIMEOUT,
    metadata: nil,
    exception_transformer: nil,
    lib_name: nil,
    lib_version: ""
  # These require statements are intentionally placed here to initialize
  # the gRPC module only when it's required.
  # See https://github.com/googleapis/toolkit/issues/446
  require "google/gax/grpc"
  require "google/ads/google_ads/v1/services/geo_target_constant_service_services_pb"

  credentials ||= Google::Ads::GoogleAds::V1::Services::Credentials.default

  # Normalize the many accepted credential shapes (keyfile path, keyfile
  # hash, channel, channel credentials, proc, auth object) into either a
  # channel, channel credentials, or an updater proc for create_stub below.
  if credentials.is_a?(String) || credentials.is_a?(Hash)
    updater_proc = Google::Ads::GoogleAds::V1::Services::Credentials.new(credentials).updater_proc
  end
  if credentials.is_a?(GRPC::Core::Channel)
    channel = credentials
  end
  if credentials.is_a?(GRPC::Core::ChannelCredentials)
    chan_creds = credentials
  end
  if credentials.is_a?(Proc)
    updater_proc = credentials
  end
  if credentials.is_a?(Google::Auth::Credentials)
    updater_proc = credentials.updater_proc
  end

  # NOTE(review): assumes the gem was activated through RubyGems; confirm
  # this lookup still works when the gem is bundled from a path/git source.
  package_version = Gem.loaded_specs['google-ads-googleads'].version.version

  # Assemble the x-goog-api-client telemetry header from runtime versions.
  google_api_client = "gl-ruby/#{RUBY_VERSION}"
  google_api_client << " #{lib_name}/#{lib_version}" if lib_name
  google_api_client << " gapic/#{package_version} gax/#{Google::Gax::VERSION}"
  google_api_client << " grpc/#{GRPC::VERSION}"
  google_api_client.freeze

  headers = { :"x-goog-api-client" => google_api_client }
  headers.merge!(metadata) unless metadata.nil?
  # Per-method call settings (timeouts, retry codes) come from the bundled
  # JSON config, overlaid with any caller-supplied client_config.
  client_config_file = Pathname.new(__dir__).join(
    "geo_target_constant_service_client_config.json"
  )
  defaults = client_config_file.open do |f|
    Google::Gax.construct_settings(
      "google.ads.googleads.v1.services.GeoTargetConstantService",
      JSON.parse(f.read),
      client_config,
      Google::Gax::Grpc::STATUS_CODE_NAMES,
      timeout,
      errors: Google::Gax::Grpc::API_ERRORS,
      metadata: headers
    )
  end

  # Allow overriding the service path/port in subclasses.
  service_path = self.class::SERVICE_ADDRESS
  port = self.class::DEFAULT_SERVICE_PORT
  interceptors = self.class::GRPC_INTERCEPTORS
  @geo_target_constant_service_stub = Google::Gax::Grpc.create_stub(
    service_path,
    port,
    chan_creds: chan_creds,
    channel: channel,
    updater_proc: updater_proc,
    scopes: scopes,
    interceptors: interceptors,
    &Google::Ads::GoogleAds::V1::Services::GeoTargetConstantService::Stub.method(:new)
  )

  # Wrap each stub method with Gax call settings; the params_extractor
  # pulls resource_name out of each request for request routing.
  @get_geo_target_constant = Google::Gax.create_api_call(
    @geo_target_constant_service_stub.method(:get_geo_target_constant),
    defaults["get_geo_target_constant"],
    exception_transformer: exception_transformer,
    params_extractor: proc do |request|
      {'resource_name' => request.resource_name}
    end
  )
  @suggest_geo_target_constants = Google::Gax.create_api_call(
    @geo_target_constant_service_stub.method(:suggest_geo_target_constants),
    defaults["suggest_geo_target_constants"],
    exception_transformer: exception_transformer
  )
end
# Service calls
# Returns the requested geo target constant in full detail.
#
# @param resource_name [String]
# The resource name of the geo target constant to fetch.
# @param options [Google::Gax::CallOptions]
# Overrides the default settings for this call, e.g, timeout,
# retries, etc.
# @yield [result, operation] Access the result along with the RPC operation
# @yieldparam result [Google::Ads::GoogleAds::V1::Resources::GeoTargetConstant]
# @yieldparam operation [GRPC::ActiveCall::Operation]
# @return [Google::Ads::GoogleAds::V1::Resources::GeoTargetConstant]
# @raise [Google::Gax::GaxError] if the RPC is aborted.
# @example
# require "google/ads/google_ads"
#
# geo_target_constant_client = Google::Ads::GoogleAds::GeoTargetConstant.new(version: :v1)
# formatted_resource_name = Google::Ads::GoogleAds::V1::Services::GeoTargetConstantServiceClient.geo_target_constant_path("[GEO_TARGET_CONSTANT]")
# response = geo_target_constant_client.get_geo_target_constant(formatted_resource_name)
# Fetches a single geo target constant in full detail.
#
# @param resource_name [String] resource name of the constant to fetch
# @param options [Google::Gax::CallOptions] per-call overrides
# @yield [result, operation] optional access to the raw RPC operation
# @return [Google::Ads::GoogleAds::V1::Resources::GeoTargetConstant]
def get_geo_target_constant resource_name, options: nil, &block
  fields = { resource_name: resource_name }
  fields.reject! { |_, value| value.nil? }
  request = Google::Gax::to_proto(
    fields,
    Google::Ads::GoogleAds::V1::Services::GetGeoTargetConstantRequest
  )
  @get_geo_target_constant.call(request, options, &block)
end
# Returns GeoTargetConstant suggestions by location name or by resource name.
#
# @param locale [Google::Protobuf::StringValue | Hash]
# If possible, returned geo targets are translated using this locale. If not,
# en is used by default. This is also used as a hint for returned geo
# targets.
# A hash of the same form as `Google::Protobuf::StringValue`
# can also be provided.
# @param country_code [Google::Protobuf::StringValue | Hash]
# Returned geo targets are restricted to this country code.
# A hash of the same form as `Google::Protobuf::StringValue`
# can also be provided.
# @param location_names [Google::Ads::GoogleAds::V1::Services::SuggestGeoTargetConstantsRequest::LocationNames | Hash]
# The location names to search by. At most 25 names can be set.
# A hash of the same form as `Google::Ads::GoogleAds::V1::Services::SuggestGeoTargetConstantsRequest::LocationNames`
# can also be provided.
# @param geo_targets [Google::Ads::GoogleAds::V1::Services::SuggestGeoTargetConstantsRequest::GeoTargets | Hash]
# The geo target constant resource names to filter by.
# A hash of the same form as `Google::Ads::GoogleAds::V1::Services::SuggestGeoTargetConstantsRequest::GeoTargets`
# can also be provided.
# @param options [Google::Gax::CallOptions]
# Overrides the default settings for this call, e.g, timeout,
# retries, etc.
# @yield [result, operation] Access the result along with the RPC operation
# @yieldparam result [Google::Ads::GoogleAds::V1::Services::SuggestGeoTargetConstantsResponse]
# @yieldparam operation [GRPC::ActiveCall::Operation]
# @return [Google::Ads::GoogleAds::V1::Services::SuggestGeoTargetConstantsResponse]
# @raise [Google::Gax::GaxError] if the RPC is aborted.
# @example
# require "google/ads/google_ads"
#
# geo_target_constant_client = Google::Ads::GoogleAds::GeoTargetConstant.new(version: :v1)
#
# # TODO: Initialize `locale`:
# locale = {}
#
# # TODO: Initialize `country_code`:
# country_code = {}
# response = geo_target_constant_client.suggest_geo_target_constants(locale, country_code)
# Suggests geo target constants by location name or by resource name,
# optionally scoped by locale and country code.
#
# @param locale [Google::Protobuf::StringValue | Hash]
# @param country_code [Google::Protobuf::StringValue | Hash]
# @param location_names [SuggestGeoTargetConstantsRequest::LocationNames | Hash]
# @param geo_targets [SuggestGeoTargetConstantsRequest::GeoTargets | Hash]
# @param options [Google::Gax::CallOptions] per-call overrides
# @return [Google::Ads::GoogleAds::V1::Services::SuggestGeoTargetConstantsResponse]
def suggest_geo_target_constants locale, country_code, location_names: nil, geo_targets: nil, options: nil, &block
  fields = {
    locale: locale,
    country_code: country_code,
    location_names: location_names,
    geo_targets: geo_targets
  }
  # Drop unset optional fields before converting to the request proto.
  fields.reject! { |_, value| value.nil? }
  request = Google::Gax::to_proto(
    fields,
    Google::Ads::GoogleAds::V1::Services::SuggestGeoTargetConstantsRequest
  )
  @suggest_geo_target_constants.call(request, options, &block)
end
end
end
end
end
end
end
| 47.982456 | 160 | 0.605484 |
5d7344308a7a8227128f824d9c90ac57a3ca55cc | 115 | require "the_bullet/generator/version"
module TheBullet
  # Placeholder namespace for the generator; only VERSION (loaded via
  # the_bullet/generator/version above) is defined so far.
  module Generator
    # Your code goes here...
  end
end
| 14.375 | 38 | 0.73913 |
f7e5ab71769bea20a385656f52c1c68e034943be | 25,563 | module DayTwo
# Entry point: announces the module and runs both puzzle parts in order.
def self.call
  puts "Running #{name}"
  exercise_1
  exercise_2
end
private
# Part one: counts input lines accepted under the :occurrences policy
# and prints the total.
def self.exercise_1
  puts "exercise 1"
  parser = DayTwo::Parser.new
  validator = DayTwo::Validator.new
  valid_count = input.count { |line| validator.validate(:occurrences, *parser.parse(line)) }
  puts "Valid passwords: #{valid_count}"
end
end
# Part two: counts input lines accepted under the :position policy
# and prints the total.
def self.exercise_2
  puts "exercise 2"
  parser = DayTwo::Parser.new
  validator = DayTwo::Validator.new
  valid_count = input.count { |line| validator.validate(:position, *parser.parse(line)) }
  puts "Valid passwords: #{valid_count}"
end
end
# Sample puzzle input from the problem statement, one (already stripped)
# entry per element.
def self.example_input
  [
    "1-3 a: abcde",
    "1-3 b: cdefg",
    "2-9 c: ccccccccc"
  ]
end
end
def self.input
"8-9 n: nnnnnnnnn
14-15 d: dzjgbdwdkdhdddh
16-17 m: mmmmmmmmmxmmmmwmm
7-17 d: pmgdhpqbqdhshgtjt
1-2 b: mvmgzbmfbrcnr
16-17 c: cccccccccccqcccgcc
1-6 x: ctqxsh
1-5 l: lllvw
9-10 k: kkkkkkklrsnk
7-9 g: xsgzggbglgtm
3-4 b: brwb
4-5 x: xdxlfx
5-6 c: cccccc
4-7 v: vvvvvdpx
2-7 s: qwdngzbtsntgzmxz
6-8 s: ssssssssssss
2-3 t: tjcttt
2-4 z: nbgfl
11-12 z: gnjzwzwzzwcbhv
4-5 v: tvkfvnvvvvcv
1-3 h: hhbhh
15-16 g: gggggggggggggggv
6-9 t: ftjztttfnztv
2-5 v: vxpmqvvzvnglvvv
2-4 r: rsxq
3-4 k: kxkk
2-3 k: kkkk
3-6 t: ktttztt
1-2 v: zvcvv
14-16 b: bbbbbbbbbbbbbrbb
7-10 b: sgmbgmxbxnpjgpthcbr
5-13 r: wmfmgmxxwgbrpllfqnjj
3-4 t: ftcd
2-3 z: szzf
1-6 j: cxjjbj
2-3 t: ttvt
2-6 h: hhkszjhjjxhqhrhn
3-4 s: ssns
4-8 x: xcjmwxxxlrqxxfx
3-10 g: ggvggggkgrggggggxg
2-6 z: lzmhmvftsvcjfhfw
9-14 x: xxxxxxxxhxxxxxxxxx
8-16 d: dddddddddfdddddtd
1-2 g: wrhg
2-7 r: krjrxmrrvhzrprr
11-18 v: vvwvdwslcxvtkzvvkr
9-10 m: mmmxfmmmmmmmmmmz
7-12 g: ggktlgggsqpgw
6-8 t: tttttbtpz
8-18 h: nfhhwkthhxhhcshhwh
10-16 c: smnclksccmnmcmzcc
14-15 f: nmndkphffnqmffj
4-10 f: ffcwqvfffffvfg
6-13 v: vvnvvcvxwxfvvvvvvvv
1-4 k: qklkkkspwkxfk
7-9 w: wwwwwwwwk
9-10 c: ccccccccbpc
11-14 r: qrxrjbbtrxrrrr
2-5 j: wckjjdjjjzvhsqr
10-12 x: xxxxxxxxxcxxx
3-8 l: vnlslkhlwdd
4-12 n: nxnfnqnnzmngnn
4-5 z: zzgpzzc
4-5 z: zgmtbz
2-3 v: vkvv
11-12 q: mqrqmlfqqqqlq
6-14 g: lggzggfxggsggqggxg
3-8 q: sqzqrbgvzp
6-7 l: lllklll
5-7 c: czrccvcclm
11-17 r: rrwrrrrrrdrrrrrrbrrr
2-4 x: xsxx
3-9 l: llllllllwl
3-19 c: ccccccccjccccvczcccc
5-6 h: zhvhhhhghb
1-3 t: ttttvttwt
1-5 p: ppppppp
3-4 g: ssvgr
5-6 t: ndjbtttnmf
14-16 f: fwptdqwwpztqhfnswts
1-2 h: ffvmvbhmh
2-3 f: gdrz
1-2 l: slll
3-5 w: zwwwrwwwwwwww
1-3 w: fszt
8-9 j: jjjjjjjjrj
4-9 k: kkkskkkkkkk
4-9 p: sppzppqpplpp
2-9 s: ssvxrrjhs
11-14 v: cvvvvvvvkvvvvvmv
1-10 q: qqqcmqfjqs
3-7 z: sgzzfzkzz
5-8 w: wwwwhwrztw
3-10 p: phbbmpzhpppspdfr
2-8 c: cscpcccg
2-4 r: rsvr
14-16 h: hhhhhhhhhhhhhhhh
1-13 j: jjjjjjjjhjxjxjjjj
7-8 v: vvvvvvvvv
4-8 r: vbhlmwrrvmkrwn
3-4 c: ccdc
3-5 h: hbjxx
3-4 q: qqdqq
10-14 k: dkkwnrkkhkwzhkkkk
1-3 z: zzzzzzm
3-7 t: tbtxfttvw
6-18 n: qntgnnnztkhwtnjrnm
4-5 g: gggrng
2-7 c: bclzgcgq
1-8 j: jbjjjjjj
2-3 x: jxxfbtxskxc
5-7 g: kgszqgcgxgqtppgb
1-6 l: llllltll
3-4 g: brgm
12-16 x: xsxxxxqpxxvxmxjxxx
6-7 k: kkkkkkk
3-9 f: fftbvjfvfffstzfff
14-15 r: khlwrfrcnrkddrh
2-4 h: hwhqm
4-5 z: zzzzd
7-10 b: bbbbbbpbbt
8-10 h: hhhhhhhdhn
5-6 c: bccclt
7-11 p: pppppppppppp
4-6 r: rwrrrrrr
9-17 m: mmmmmmmmmmmmmmmmmmm
3-9 n: nnnfnnnnjnn
3-4 q: rqqcq
1-2 z: mbzz
1-6 h: hhhhhs
10-11 h: whhhrvtghhh
5-6 g: ggggjz
5-8 g: gggbgggvg
15-16 v: vvvvvvvvvvvrvvtv
5-7 q: qwrzqbllqqqrgkqnlsr
2-4 c: ccccc
7-14 p: spppxpppphblprm
4-9 j: qzxlxdzsdlnlhw
2-4 l: lzphl
16-17 g: ggggggsgggggggxpqggs
3-17 d: ddddddddddddldddcddd
10-12 n: qtkgqxknvjnnsbnrwtp
8-13 d: dddddddcddsdkdd
7-16 v: wkntvjdvvwvfvnqvm
2-5 w: vwhjw
2-3 f: dmgff
2-5 p: nklmmzp
4-5 x: vxxxxd
1-2 m: mmmqm
9-12 r: rrrrrrrrqrrm
6-9 r: ldvhvrjdkqhmr
2-9 f: fmkffptffzdgk
5-12 x: xxxxxxxxxxxbxxxxx
3-4 w: bwwqgwt
10-11 f: fffffffffhfff
10-13 n: nnfnmnhnnnnnmn
12-13 n: nnnnnnnnnnnnnn
8-13 z: rzzfbzzdrzlkn
13-14 p: qgkxppfppfjcnx
3-4 z: bcxdctzg
3-9 z: zznzwzzmffqxj
1-2 n: nnnznr
2-6 s: mtspsskswf
2-15 g: ggggggggggggggqg
3-8 x: tzxqfjrkpdgkxptbxm
15-19 v: dvcvvvvvvvvvlvqvvvv
4-9 r: dszrbrbzrwlfxpcrkn
6-7 f: fffffsmf
5-8 v: vdvvvvrv
10-11 m: mmmmrmmmmmhmmm
9-12 s: stwssszcsssr
8-15 j: jjbjjjjjjjjjjhj
6-8 v: vvvvvhvv
1-7 w: ghxwddwwbfchvtff
6-9 v: vvtxxrbqpvdvvv
3-5 q: fqfqrskl
15-17 k: kkkkzkkkkvbkkksrlkkk
2-18 w: rtfwsrwcbpwtwjzmwwws
5-7 f: pfshgsnff
8-20 h: jkkhhhssfppkjqdfdhtp
2-7 b: bblqbfhbbbb
3-6 j: wjmjjxjj
4-13 d: dfzdkqpddgchbhtgg
4-7 x: xxxxtsn
5-6 m: mmmmmmtm
4-5 p: pkmppck
1-14 j: jpjjwjjkjjmxkjwqj
3-4 s: qxsfvs
8-9 k: kkkwkzxrvkkkk
13-15 r: qzrzrrbrxrcrzjpr
1-4 l: lmvlj
1-4 l: mrll
11-15 w: wqwwwnwwwwtwwfwwjww
8-9 g: fggzsggmggggccm
2-5 f: vggqfcffxh
2-4 c: cwcccr
11-12 c: ccccccccccfc
3-5 g: ggmggg
10-11 l: llllllllrll
5-6 v: wvdjvv
6-7 p: ppppphkp
7-10 c: tcccccnccjcccc
8-10 m: mmmrmmlgmt
3-14 j: ljsjjjjtjjjjjhj
10-16 c: kcjcccccccccnvtccccc
1-9 n: tnjnnnnnnnn
3-7 h: mhdpxhhh
15-16 c: ccccccgcccccccgcc
6-8 t: tvkddtrt
3-4 k: fkhjk
9-11 v: bqmhnjvpvxr
7-9 x: xxxxxxsxpx
16-17 h: hgjzqhvqhsgcplwmhvth
16-17 p: ppppppppppppppkqvp
8-11 l: lllllzlbclslt
4-18 k: kkkkkbkkkkkqkkkkkks
1-8 b: cxbvpxbzbsz
1-12 f: ffffffffffffffff
13-19 d: ddddddddddddkddddddd
9-11 v: mtvvvvvvhvgvv
10-11 t: tttttttttdtttt
5-9 g: gglgggpgdlgtcqg
7-10 m: mzmmmmmmrn
5-6 q: qqqqsq
8-11 f: ffrcfbfnwffzfpxfbvf
7-10 f: ffvfffxbpfffmjfqb
2-3 r: rrrr
6-7 x: bxxxxxgj
3-5 j: jljndr
4-14 q: qqqzqqqqqqpqqdqqqqqq
3-6 n: nnnnntnnn
15-16 g: ggggggggggggghggg
13-16 x: jxxzqxvxfhxxvggxdqx
4-6 f: zfxfkfhmsfgjf
9-12 r: rrrrrrrrgrrnrrrrv
3-7 w: wczsqxwdw
9-12 k: hxgkkklkkkkwkkk
1-4 s: sssns
5-6 v: vvdvlv
12-14 g: gsgggggggggggkgg
5-16 r: rlkqflvvtfcdckfrn
18-19 c: cfncccccccccccccclzc
4-8 n: nbntnnnvnnnnnn
3-6 l: jfpxllfrbll
4-14 k: xphrkstmqmqhkw
2-10 b: rrsrbbqvbr
5-7 k: wkbgkkkqfkkkk
5-6 k: kkkkkkk
7-10 k: mqrsjkksskswqz
11-14 x: kxxxxxcxxxnxpbdx
2-15 h: hhhhhhhhhhhhhhh
5-12 p: pppjpvgppppppppprv
2-11 l: lzlllllllpdlll
5-7 t: xwtzttvctqtpvbltpbtt
2-18 k: ckhhplvxfrdqkbjhbkpp
12-14 k: rkkqvkfngkkqjn
3-7 n: mtxnkbnw
3-11 z: zzzbzzzqzmzzczzzz
3-4 m: mmmq
2-7 s: ssssskcnssgnkzd
3-4 g: rmgrggd
2-3 h: grhrtpcch
2-6 d: dddddddv
3-7 g: jggggggggrfggggg
5-6 v: ljztvvqrcsjdnlwv
2-6 x: vxxtwk
2-8 v: vvvvvvvvjv
3-5 h: dwhwv
2-7 n: ttxgbdbwnnnvnnx
2-4 c: cctfcsb
16-17 c: ccccccccgcxckccxccc
5-6 x: xxxxxx
2-6 p: pwndpfrpppppppppppkp
1-4 h: zhhjlh
2-5 p: prpxwppljppvwjtppkp
2-6 j: bjjjjjjf
16-20 p: pmppppbpppppppppsdpn
1-3 b: bbbh
8-11 k: nkxskkbkfjvcjkdcgrk
7-12 c: cqcdcccgchnd
5-7 m: mmmmmmmmmmmmmmm
4-12 j: kjdjjvrjxsjw
7-16 c: dfzcsdcdgrxvfkjzccmw
1-12 m: mmmmmmmmmmmmmmmmzmm
1-2 s: sssws
1-5 x: xrtxs
3-4 w: wvwv
6-7 s: sssssss
11-12 m: lbwtvjjmmggm
16-17 l: llllllllllllwllht
11-13 t: qtttttgtttttdmt
2-5 r: ldgrf
13-18 r: rrrrrrrrrrrrrrrprr
12-19 l: lnfnhllrjgcllhjlczl
6-7 c: clccccv
9-14 b: shlfbcxhbbmbbzbg
4-6 s: sjssvxdx
11-12 v: vvvvjppqkvqv
5-7 j: jjjjmjjjj
8-11 l: dsldfrdvbll
3-4 p: pkvmcps
2-3 s: ssss
8-11 j: txbxnpwqxjm
4-6 d: drdvdd
4-8 v: xgwkxvfq
1-5 t: zttttttttqmtt
5-8 c: ccjccccqc
9-18 z: zzzzzzzzzzzzzzzmsgzz
1-4 q: qqqkq
10-11 w: wwwwwwwwwwww
5-9 r: prtdrksjbmzmrq
1-3 v: bvvnsmrdgxmnxjjv
6-11 d: drnkdpvdvhfrfgzkn
5-9 h: khhrhhhtvhtznhf
1-3 g: gfpgn
4-5 p: pppfg
8-19 n: nnnnnnnrnnnnnnnnnnzn
6-13 c: cztfcwpjcczsc
3-4 r: rrrg
4-6 t: tfjvtv
4-7 g: nggggggg
2-3 c: cmkc
7-15 r: jrrrrrrrrrrrrrwtrrqr
8-12 h: hjtpfjpbbzwhw
7-8 d: jddddddd
1-9 s: lsssssssmss
18-19 m: mmmmmlmkmmtdwmmmmmh
5-9 t: ttzstqtmt
7-11 x: xxxwxxxxkxxxxgfxxx
2-3 s: dssjzpsxjr
3-4 g: gcgtg
3-4 f: ffxff
9-11 v: tvvvvvfslvvfvv
1-7 n: nmlfmnz
5-6 q: gqzqtqlq
4-5 v: pvvvczdtwthvc
1-3 z: zzrd
6-7 b: sbbmbnbbrp
3-7 l: lkllvqplww
7-9 s: sssssssss
1-8 g: gggkvggsgg
5-6 n: nhnplpl
2-9 g: vgnfzjfvjmmtgd
1-4 d: dcddddddqdh
1-9 z: zzzzzzzzzz
8-10 q: wqhcwqdqqq
7-18 z: tfrfdpwpphzplxfbnw
7-10 f: rffnjdnffmfffrzfqgz
6-7 p: pppppppp
2-13 n: cndfbtxgqzmtnmq
14-19 v: vvvbvvvvvpvvvsvvvvvv
8-11 b: bbblbtwbbbbbb
3-5 v: rfvvv
15-17 n: snmnnnnnwnnnnnnnnnn
4-12 q: sqqqhwnbqqxqbvfrk
4-6 l: lllllvl
17-18 c: cccccccccccccccccc
1-12 t: gttttttxnttzttt
16-17 t: kfmtctsqttztmsxtf
7-14 f: ffffffcjffcfjfff
5-19 b: jtvbbbrbbbjbcsbbvvb
7-8 g: gggggggk
2-4 w: dwvwww
1-3 r: rrrrrrm
7-8 q: qqcqqqqq
6-9 m: tvglftcxmmnshcvscp
3-6 m: cmgmmdmm
6-9 k: kkkdkwzkgk
2-15 w: bwqwwhpwvfpdwdvjwvk
7-8 h: jdhhnhrdghh
3-6 l: llxllpl
4-11 q: ggrjmwhrqdc
7-8 w: cwcwwwwwww
5-6 s: sssssls
6-9 w: rwjwjwwww
1-8 c: ccncccccccccc
2-4 b: xkbbf
6-9 t: ttttttttrt
14-15 h: hbhhhhhhhhhhhhw
14-17 t: ttttgtttdttttttttt
4-7 q: gqmqrqr
9-11 j: tjjjsjwjglpjjjkjjg
18-19 j: jjjjjjjjjjjjjjjjjjf
15-17 p: ppppvhppppppppppp
3-5 d: jlqcqdnprnddpd
6-8 n: nnnnnnnn
5-7 h: cwhhthhhv
3-5 z: wqzmzpbjstzjpfww
2-9 c: fkthkshccccczcph
11-12 c: hcsscmqcdcgp
8-9 t: tttttttkq
3-4 m: tnmcvtljnpdphg
11-19 l: wqjvlvzrgxlvrngwlktj
6-14 s: dxzsssssmzgsmssss
4-7 c: ccckjchc
8-9 s: ssssssrslsm
16-17 b: bxbbbbbbbbbbbbbbz
3-12 p: mpnpplpkppmbp
14-15 v: vvvvvvvvvvvvtvv
5-6 d: kttkndlslhfg
3-5 w: tlwwl
3-4 n: wnhn
13-14 r: zrbmrrrrrrrrmrrr
3-5 f: fzwgrmzpkcfqkftmz
5-6 s: ssssss
5-7 h: wxpzhhzgwhdxqhmhw
8-12 j: djsgjqjjjjjdj
13-15 h: hhhrhhlhhpjhhhr
6-7 v: vwvmvhw
1-9 k: dmwkkkkzk
1-2 g: ggtkbgvgtvggx
10-18 r: rrrrrrrrpkhrrrrrrxrs
5-10 n: nxbslftnwpnn
4-5 d: ddhdd
12-15 l: lllllllllllllhlll
2-3 m: mmmmmmmmmmmmmmm
1-8 s: ssshsspm
3-5 b: nblhbdbbb
2-4 t: qtjtctndq
2-14 v: vcvkvvttxvkvpvhvvd
12-16 g: gqckxjtggwzgtlqrtjkf
3-10 s: ssvssssrsssbss
2-4 n: knln
4-10 f: xfqlfffbflm
1-3 s: mssshjgshpfrssrss
4-5 t: nttbj
2-13 w: lwqwmwhthzrtwkgp
1-3 r: jrnqb
3-13 k: khkkkvkkfkkqkkhfwzf
4-8 g: gggwgggg
3-4 g: wgjgxhngdt
7-9 k: mgshkgnhkpkjsmkvjp
7-9 d: gdjrfqndcdhddmdhdd
17-18 g: gnggggggggggggggng
14-15 g: ggrgggggggggggg
2-7 x: sxpwxddpxnxtxft
5-6 r: grdvrrrrrwrrrrrgvq
4-5 f: ffffff
8-9 f: fffxffzgff
12-15 f: dfffffffpffgffjlfmvf
10-14 l: dlflltmllplljllvlzll
1-2 j: jtnftndqklxzj
1-5 d: ddjwrdddddddhdddddz
13-14 j: lgztjqgjfbjfxff
5-7 k: kkklqkz
15-18 m: hwvmmpmmpqwsmrwmmmsm
5-6 h: hlvhhhqhlh
2-4 j: jjldjjjx
10-11 f: ffffffffffffff
15-16 t: ttttttttttttmtjttt
3-4 v: vvvv
8-9 n: nndhnclnnnnnrwnn
5-6 r: rxrrrrfrr
4-18 p: vwmppgpzltpplvwvpg
9-10 z: fzzzzzzzrzzz
4-6 n: nnbnvnnn
10-11 x: xxgxxxwxxfrwk
3-14 v: tqvvvjvdvvvzkvvvvv
11-13 z: zzmzzzzhrzrzzzzzp
5-6 s: ssgwsvssqs
2-5 z: zbzfzzds
2-5 s: sssss
1-18 r: rrrrlrrrrrgrrrrgrr
2-3 v: vgvvvvv
3-13 c: qdcvfvkjtczgb
3-9 h: hvhhhhhshh
12-13 x: xdxxxxxxxxxxxx
3-17 q: qqqqqqqqqqqqqqqqcq
12-14 q: khnjqkkxlzpqhqj
14-17 t: tttttttttttttsttpt
15-16 d: kwpdfkbhcdhkwsdh
2-15 m: fmdvntdqklcvmhmqjl
5-7 q: qqqqvqnqqq
3-4 j: mnjjj
4-15 f: lfbffxfpffsfbdfhpfs
8-9 s: sssssssgr
12-14 b: bbbbbbbbbrsbbnbb
2-3 x: xbxpvc
10-14 v: vvvvvvvvvvvvvx
6-13 f: kfvfcfffvfhdn
4-16 k: gnzkfdkmrjvwdjgkf
2-5 l: lljll
2-4 r: dprr
1-20 v: vvvvvhvvvvvvvvvvvvvc
1-2 l: mlzljlvllkpzll
4-13 r: kbzrzdrmdvfcl
6-7 v: vvpvvzvv
6-16 h: dxfwghscmjgfqmmf
6-9 h: hhthhfhhdh
6-10 j: bvqrjjjfdtjjjjj
4-7 k: gklkcrvvkk
2-8 k: ndtdhjpb
13-15 f: ffffffffffffffb
6-7 m: mmmmmmzm
2-15 g: xbgxgfgftvgvgmcgnxc
4-8 q: xkrqvqqq
4-9 v: vvvvvvvtbv
8-16 w: wwwxwxwbbwwwtdww
8-11 h: hdhhhhhcght
3-4 f: ffsb
12-13 h: hhhhghhhdmpfhh
6-7 f: ffdmgfh
1-2 c: pcsc
4-6 q: lkzqqmqqq
3-4 b: bbjg
3-6 f: lmbzffl
10-15 p: prxpdpwqpwplpppp
10-12 p: pppppppppppfp
7-8 m: mmmmmmmjmq
5-8 j: jjjfjbjtjj
8-10 t: tttttttmtm
5-11 k: kkkrhkkkkkk
2-3 c: jcccwcbwfrckb
3-10 n: qnnnnnfnnknnnnfnxn
12-13 h: hhhhhhhhhhlhh
9-10 s: smspmjsssss
6-7 d: dddddmdnd
9-10 g: kpnbgstjjgtbhk
9-13 s: ssssssmshsbsxss
1-2 s: lsfsss
8-9 n: nnnnngnnn
9-10 s: sssfssnssssjs
6-8 m: mmxmmmmmm
4-6 x: xtxjxx
8-11 r: qrqkrwbfjln
9-10 h: hfhhhhhhvq
6-10 k: kpkpkjkkkvk
7-10 b: bbbbhbkmmgbvk
8-10 k: kkkkkkkkplvmmk
1-2 r: rrwr
2-5 d: tdddrddgq
7-9 j: jjjjjjjjfj
15-17 m: mmmmmmmmmmmmmmmmj
5-6 v: vvcvvvfwjvvtn
14-18 t: kznfxrrtfcdntttjrt
3-4 c: cjdcc
17-19 q: qqqqqqqqqqqqqqqqfqh
4-5 k: fzvkk
3-7 p: pgppppppppfpp
10-11 k: kkkkkkvkkkz
6-7 t: tttjcttt
2-4 w: kwlcwbgfwwtqpngnb
4-5 q: kqqvlq
16-17 q: qqqqsbqqqqqqqqqbqq
19-20 q: lndjjddqcqkcjztwfxvw
4-6 p: pppmdccpp
13-15 x: xxxxxxxxxxxxhxw
1-9 d: lmdsdfbjpkmh
8-15 v: vthvwvjvcvvqvvhmvs
6-7 r: qxrrrrpr
3-6 n: cxzxnn
9-11 l: lllbhllhllbll
5-9 d: nbsqdctdkp
1-10 l: lllllllllflllllll
1-5 f: cdsfqzwfnjkhdj
2-5 m: clmgsmmdz
1-6 g: cbqgngzvjpxjbwnlprq
6-8 l: bfxrllllllw
6-9 v: qtcxqvmvvdptxvv
3-4 p: fjqx
5-8 t: tpnttxtlt
5-6 p: pppppf
12-13 q: qqqlqqqqqqqqw
5-6 g: hgggvx
1-2 x: qxxx
6-8 n: nkjnnpnnn
6-8 j: jjjjjjjfjjjjjjjj
4-5 s: sbsjm
2-6 m: lmtmlm
3-6 c: bzvhcccncz
3-5 l: lllglnlmpl
2-9 p: hpbwppwdrlpfwbdkmj
7-10 l: tlzwhnlrkrc
4-6 m: xmmmqdmcl
1-6 d: jkdmmd
5-7 c: cjqlhcdcc
4-10 z: xgtznmnznkzvz
13-15 z: zzzzzzzchzzzqzzzz
2-5 q: qhvqqz
1-9 l: nlllllllzlllllllll
4-5 q: qqqzh
5-7 t: ttpttdt
3-11 l: lmnxlthlmlzksggzll
2-5 q: qprnq
16-18 b: bbbbbbbbbbbhbbbtbgn
3-4 l: rlhl
8-9 n: nwdfntdnsnmzkqthl
6-8 h: hhhhhthv
1-2 x: xcxp
7-11 t: nttttctzxmtt
2-4 b: ksvb
17-18 d: dddddddddddddddpqd
3-5 x: lprxzdhxxvz
7-8 z: zgzzzzlzz
2-3 p: rcpxw
11-17 k: qkkkkkkkthkkkkkmkkk
4-6 s: ssrssswxsssmsk
5-8 d: ddddddddddd
5-17 v: vvtvvvtvdvvvvvvvmcw
3-8 r: rhvfrfcr
12-15 z: zzzzzzzzzzzpzzpzz
7-8 p: jppppvpp
13-18 d: dvdnddddddddgddsdd
10-11 q: hqnqvdpqdqqkq
2-4 x: jcxxl
6-7 q: qqfhfqjqr
10-11 w: kwpwwhwnwwwwwww
3-17 d: dddddddkddhdddddd
4-6 s: ssnhswswlsc
14-17 j: jhjjjrzjjmjfpzjjjjj
1-9 z: ztzhzwzzkdzzr
7-14 s: dcspnghrssxsss
5-12 w: ctbvvxzwbwgqw
7-9 k: kwkkqkkkgkm
11-15 l: gllclqplllrlnlpzllc
15-16 p: hpbpppppjpppppppppp
3-5 k: cklnfv
5-7 t: jczctqm
14-17 j: jjjjjhjjjjjjjsjjjj
9-10 g: gsknxggrgblfprf
5-6 m: zjmmqrflmfpmswcvvmtl
8-9 d: dxdddddds
16-20 w: wwwwwwwwwwwwwwwdwwww
5-7 s: ssssnsbs
5-8 z: bzbgzvzz
2-5 b: wbpkbbnkhcndh
11-13 r: rrrrrrrrrrkrsr
12-17 c: ksffzpccccspsqvjc
2-20 v: vvvtvjvjvvvvvvvvstvv
3-4 z: tzszjqzz
3-12 x: scpxgxqrtxxpjxn
13-16 w: wwwwnwjnzwwwwwwwwwwr
9-12 l: zxlrflvclgck
2-8 s: klsnwsdzbss
5-9 s: ssssssssns
1-9 q: fqqqqqqqqqqqqqqq
15-17 n: jknfnsvntjqglbnxn
15-16 h: gbpdwhhhhlthrjhh
5-7 w: cjwgkxdwwxl
1-5 j: jmnczjrlqchdwpthbjst
6-8 v: vvvfxkvvvv
5-8 g: ggbgqggmhg
3-15 v: vxvvfzvvwcxvjvvqvvq
6-14 h: drppknzrchcjhh
2-3 n: dgntnq
9-14 s: ssssssssxmsssxssssss
2-4 j: wjrdcjz
5-12 n: dntnnvbfjnznn
3-4 z: zzmnzbsrt
6-9 n: kngngnnbcnhp
6-13 k: tmsskkjkkvtksnm
2-4 w: rwhf
6-7 p: ppppppwdpvp
2-8 r: prqvprbrvrsjxm
1-8 f: gffffffff
6-8 b: lbmtblbptbxfdqb
4-10 r: rrsrrrnrrbmrngrk
10-17 b: bbbbbbbbbmbbbbjbhbb
2-13 v: tmvqcvvwvvqvzvdx
13-19 d: hrtlzplwdkshdbsjmsd
6-13 w: wwwwwwwwwwwwdw
14-16 q: hlkhpvdqrkhkqlhhlg
7-8 m: mmmmmmmx
11-12 p: pppfnpqppppp
4-5 k: kkclzkfk
8-9 k: krkkdkkkk
15-16 w: wwxwwwcwwwwwwwjw
12-16 w: wwwtmwnwqdwvjpwzwwz
5-9 d: wdddrdddrdkdkddd
5-6 p: qppbpvp
1-7 t: fgznlbtgwtnrlhgf
4-10 j: jjjjjjjjjljjj
3-6 z: spgznzcz
11-12 h: xhhhkfhhhhmh
3-12 g: ckgvgbrgcqrggggt
1-6 k: kkkkkjk
2-3 j: qzbf
3-4 r: rrlprrrrrrrrdrrrrr
13-19 l: llllllpllllllllllll
6-10 x: xxxxxjxxxxnx
2-12 f: jffffbffqftf
5-8 b: wbgbgbsb
4-8 t: jflltrxfgttqvrgt
2-5 z: fzzzszcwzz
3-6 w: whwwbnw
12-15 n: nndnnsnnnttnnnnnnn
4-9 c: mcsstwccc
3-4 q: dxwq
6-11 v: swmrlccvxzdrffvvwj
10-11 x: xxmxxxxxxmx
1-2 k: fkqk
15-16 d: nlnzddfrjqvdjwddmw
2-6 k: ldkdmnmcp
1-11 p: fptzpgpzfhp
12-14 b: bbbbbbbbbbtbbx
6-12 w: wwwpbwwmxwxz
1-4 r: mrrrrrr
13-18 w: wwrwgwrlwwwwwwwwwlw
16-19 z: szzzjzzmzzzzzzzzzzsc
2-7 v: qvvcsvd
7-10 s: mtskmnvnlcgsb
6-7 q: mkdqqqq
3-5 x: xwxmx
7-9 q: qsqjfqcqwnq
1-4 r: crcb
14-18 k: lfwklgqjzgxfpkpkfqr
9-14 j: jjjjjrjzjjjjjfj
5-6 x: xxxxxxh
1-17 z: gvczzzzqdqzzzzzmwkzd
11-14 k: dkkklkkpkkkktkkkkkk
9-12 j: jjjjjjjjqjjv
2-6 d: dthddmddd
4-5 v: qvfjfvvdlszqxvbl
4-5 n: nnndn
7-8 c: wqnmpcwf
1-10 c: cvgmnkcssrhqccmddgh
8-9 m: lmmtmxmsm
3-6 v: qvvnqvvrfhv
6-7 x: vxxxxmkjxxxxxxxxxxx
5-14 x: qxjgbxxxxprxxn
13-14 p: dphsqvhrfsbpqpp
3-5 r: trvrrxrrr
2-5 f: fffff
2-6 w: whwdwnwt
2-3 x: wxcsxx
10-14 p: pswtzbkslpqppph
3-5 l: zllllx
9-11 g: ggggggggggxgg
4-6 h: fvhhfhhfjqffhhhwx
7-8 t: wttttttf
11-14 q: qqqqvqqxzcqpqhqkpqtf
5-9 b: bsrbbbxkm
13-15 t: tttttkwtttttgttwt
4-7 c: czccvxtc
9-10 n: nwnnnnnnnn
3-5 f: ffdtf
13-14 h: hhzhhhhhhhzhphh
9-11 g: kgsfsnhsggglgsp
5-6 s: hbxpnsssvx
10-11 t: ltsrtmvmkgm
12-18 k: xlwkkkkktkkkbkkrjxkk
5-7 l: lnllwlw
16-17 k: kkkkkkklkpkkkkkknkk
11-12 t: ttttttttttttt
11-13 v: vvvvvvvgvvvvvvv
10-14 d: ddvddjdddkdddd
3-4 r: nrdrhnr
1-4 q: pqqxcsws
5-6 p: ppppjpp
14-15 b: bmbflqmbzhkbqvb
9-11 t: ttntttttnpt
15-16 l: llsllldflhblwlllf
5-10 w: swwwwnwncwtfrpnw
6-7 j: jjjjjthb
2-4 d: ddhdzfdddlp
2-9 q: jqqqqbqqqzq
19-20 b: bbbgbdbkbbhbsdpbbtbh
7-9 v: hvvvxvvvv
6-7 l: gsbslll
2-6 p: pzpppwpppppppppp
10-11 g: kmkngxnnrfh
1-11 v: rvdvvvvvvvv
2-4 n: nhndnnnnqn
12-13 q: qqqqqqqqqqqqm
7-9 s: ssssxsnsssss
2-15 t: ftntkfmqxzxggcfwb
1-7 v: vvwvvvqdvv
6-14 s: sbsssdssssssfqsss
3-8 w: vwwqwwww
6-10 z: zzzzzzczkzzz
5-8 b: bbvdmxqbbbbbbbbdv
4-8 s: stvsssszcswszs
12-13 x: xxxxxxxxxxxxxx
5-15 d: ngzjdxddjddxvbwzbd
1-10 s: ssssssssss
10-14 r: rnhrrrrrrrznrrr
1-7 z: nvzzzcvzzmzhnprzkz
2-9 w: zfmxnmltsbxpfsh
13-14 l: lllllblllllldgl
3-9 b: dbbrkmmfsmhblqb
1-6 t: tjmrctqggzdgtz
1-14 m: bmmmmmmmmmmmmmmmmmm
3-4 b: mwbndzbjp
9-11 n: nnnnnnccmnb
5-11 b: kpcbqbbbcmbkb
3-6 j: wpjjspjw
6-12 p: pppppppppppdppp
1-4 c: rccc
3-9 l: llllxllbllll
6-14 w: tlpcwxwwwzbtwd
3-4 q: qqqg
3-5 x: htxcvqkwx
15-17 r: rrrrrrrrrwxrrrrrr
8-10 k: kkkkkrkwkkw
3-4 s: jjwhksqrbxs
3-4 g: gvggg
11-13 t: tjgttvtdtbttttttqttr
5-6 j: jjjjmjjj
9-13 f: fffffffflffff
13-14 z: ccbzvzwmzpzzmz
6-7 b: bbjtbbbbbb
3-5 k: pkknmv
3-4 w: wmchw
4-7 b: bbbgbbbbb
10-13 q: qrqqqvxbplqqxqqqmqq
1-8 b: rbbbbbbjbbbcqbb
7-17 q: xqqqqqqqqqqqqqqtq
4-6 k: kqkkbbg
6-14 l: lglhbvslmhqjnw
4-6 w: pwwxxn
12-16 q: tvtqvcchgtmqqznb
8-14 q: hkthhbpjvkpxgq
11-13 m: smmffkhhkrmwnbwrs
8-11 d: dddddddddddd
4-6 m: lmdmmkwmmmfbs
12-20 w: wwwwwwwwwwwbwwwwwwsb
5-7 m: mrnhmplhwczr
7-11 p: pppppdbpppvpc
4-10 p: zpwsppfppj
3-12 d: dddddpfddrdln
8-9 g: chfcgzfgg
5-6 g: gggghg
7-8 q: hqqqxqqqqqqqqqg
4-10 c: cdtpvcccvd
7-8 s: sqwtsxsssss
5-11 v: pcvvrvxvvnpjn
4-6 z: hzmnzg
4-6 b: bslbbbxbbwml
16-18 z: zzzzzzzzzzzzzzzzzg
1-3 h: hhkh
2-4 r: wrtrb
5-6 d: ddddpd
4-7 k: kkkkkkkkkxk
8-13 k: pkkkkkkkkkgkkd
8-13 p: lpxppxpbpgppzppmppp
3-9 m: mmmmmmmmmmmmmm
6-7 h: cpmmhcttgx
11-15 b: bbbbbbqbpkfbbzh
4-7 j: jjqvjjj
6-8 c: gwrfrslcsqmcttzcl
11-13 r: mrrrrrpgrrnrrrr
3-8 q: tqqqqqqz
2-4 f: mtsxj
1-17 g: cgggggsgggdgcgrxg
1-9 m: mhgqjqxjnpmmmgnmfpm
2-4 g: rggbc
2-4 w: wgfw
7-9 d: qdmddddddmxdd
12-13 t: ttmjznwtwtttz
11-12 b: kjbkbgdbwbmcb
4-5 h: mhvhh
4-6 t: ttvttttt
9-12 r: zwlljhfrrfrr
3-4 j: jjjj
2-7 v: vvvvvvmvv
9-19 n: nnnnnnnnmnnnnnnnnnqn
1-3 q: qfflx
8-10 k: kkkkkkknkk
4-9 k: kvrspqfkkqj
14-15 b: bbbbbbbbbbbbbbbb
3-8 d: qrdsfdwd
5-8 j: jjxmrtjjjjkpnnjt
14-15 f: fnlnbzqffrrnffgf
1-4 f: fnff
4-5 x: xxxtx
5-6 q: qtqqlmqqq
14-15 f: fflffdfffffffkjfff
1-5 k: kkqkvkk
5-9 b: nbjtwbbgw
13-19 v: ssjjggrvrnbpvjlggsf
8-10 x: xxxxmxxxxx
4-5 d: vdwhtcsc
1-7 l: zlllllglll
2-5 d: drjpdwrlnpfqqrnlh
12-13 p: fthsszdpjcqxd
14-17 z: zzczzzzzzzzzzzzznzz
6-11 m: hhcsqmvmfgmkmbm
7-8 m: hmmmmmcmm
6-7 l: lllllgl
5-10 t: tttktdzttlcttv
6-12 m: nmmmpmmmxtmmjmxm
4-5 q: qnqnnqqqqqqq
1-4 c: zkpcc
5-6 n: nbphjnfn
6-7 f: ffffxqrf
1-6 g: lgggggggg
5-11 p: mbfpppplhdp
5-8 z: jzvzzkzm
5-7 l: llllllll
5-14 x: gvjvwmhxtzpsfxb
4-13 x: tvlfjxmpfrmqmkhrdlbl
8-11 b: nbbbbrcbbbb
8-10 w: ccccwwgwwwwl
2-13 p: wwwxrsfztddwplfpqx
17-18 c: ccccccccvccccccccj
11-13 d: dphdtwhqwdhmx
15-17 c: ccccccccwcccccccdc
3-4 d: ggjv
3-4 b: bbqbbqbx
2-3 g: qggg
10-18 w: wpbbfmdzhsnbcdxqrh
13-20 v: vpvvvvvvrvvvvsvvvvxv
16-17 w: vwwqhwwwfhwgdkhfw
16-19 b: bbbbbbbbbzpbbkbkbbjb
11-16 x: hczxsbkhxfbtxvzx
10-17 b: bbbbbbbbbsbbbbbbvbb
2-3 p: pppxsznnn
2-7 g: ggggsgggl
15-17 t: tttttttthttttttttttt
7-17 f: fffsfftfffffffffffqf
3-7 k: gpfmqbkqmkkkrpkpk
4-7 j: xpbgxjw
14-15 f: ffffftknfklfdff
1-4 k: pkkslkkkkk
4-7 w: czbwgqtpwztxwwbwmqf
1-5 n: tnnnnn
1-5 q: qdqtq
10-15 c: fccqccrkcccczcmcc
9-14 h: hhbbhhhhdhhhhhdhhh
2-6 f: fsbfdcvffhffm
12-15 h: jhhhhhhhhhhhhhs
9-11 p: ppppppppppbp
15-16 d: txdbddddjdsdbrvddddp
5-6 w: wqkwrv
2-4 z: zzjnsjspkgdkmvvlh
13-14 z: zzbzzzzzzzkzzzz
9-10 v: vvvvvvvvvcv
13-15 g: gggggggggpqgtgv
1-17 j: bjjjjjjgjjjjjjjjjjj
5-7 x: dpsxxxx
17-20 d: ddvxddtdhdrddjddnddd
5-6 f: ffxffp
12-14 w: wwwwwgwwwwwwwq
8-13 t: rnthvkbtnxvltsjn
4-5 q: qqqdq
4-12 s: ssssssssssszs
3-4 s: sssw
2-5 x: kzxwxqljv
1-3 x: rhxxdxxxdg
11-12 t: tthtstttgztrt
1-4 n: qzdnkpkrzgxdztdcfn
12-15 g: fgdzsqwkfdhrbsjz
1-3 g: kggg
6-9 c: hcwcccpcccwcck
8-9 r: rqrlrggwrrjfzgkmrq
4-5 r: ghzlrrrgs
9-11 g: ggggggdggggr
12-14 f: fffffffffffcffffff
3-7 z: mgzmpjz
1-6 h: hhzzbm
18-19 g: ggfhhvggffkqfgjnggg
1-8 p: ppppppphwppwr
13-15 l: qmjnjfnnlmbqgllwhk
5-13 z: zgpwzhwnmptrzrq
3-4 v: xnvnqjjrvvlgv
14-15 p: pppgpphppppppwpnrpr
9-13 h: hxfhhshbqhnhhp
5-6 w: wkvqbtbtfmsbwkwwhdwb
1-3 x: shxxx
1-2 v: vhvt
5-8 c: zdrlnhcctcpccsntl
12-19 x: xxxxppxxxmfglxfxmxg
3-4 v: vfqwq
8-9 f: fffffffxx
11-14 l: lslvlnbsjlbllmqllfl
3-9 j: njhfczjmmn
3-5 q: scqrdqq
2-16 q: tvgkgqqcpqmpjkqqqqqb
11-13 q: vdrmqfqqqqqqdjqq
3-8 j: jftcskds
10-11 j: ljjwfjtxqjh
7-16 z: zqtcldzxqmzmgzbz
6-14 v: vvvvvvvvvjvvvvv
1-13 q: qdrqgpcqzbpqftws
6-8 d: mdgddbtdddzd
1-6 g: gggjgggggg
1-4 d: djddbhddkdtkvt
11-12 l: lllllllllllll
1-9 g: gggggnvgm
7-17 v: vvvvvvxvvvvvvvvvv
7-11 m: mmmmmmmmmmh
2-4 m: cmfmcpm
4-5 m: mmmmjm
6-9 q: qzcqxbvqfk
1-2 s: ssnssms
2-4 m: xmmz
15-18 h: rtzvhbrpsrbhxqchbz
5-6 w: jwwwwnwt
4-5 m: jmrmwfqmrmtgmm
8-11 p: pkpjpmpppxzpp
1-7 s: sssspssszss
3-6 l: lclllllknlm
1-2 p: pwrwlc
7-9 s: ssssssxhfss
5-9 l: llllllllflllgl
12-19 n: clkznzskfvxngwnnclpx
14-18 n: nnnnnxnnnnnnnnnnngnn
1-8 m: vwbpmmsxmvbwsggqgxd
10-11 q: nqqqqgqfqpf
4-15 b: fctbwzqnwbnvqbqlb".split("\n").map { |num| num.strip }
end
end
require "advent_of_code_2020/puzzles/day_two/validator"
require "advent_of_code_2020/puzzles/day_two/parser" | 24.67471 | 99 | 0.636271 |
0181314fafc89dcd877fb03dbeb5bc6c08570c0e | 255 | class AddOptionsToAdvancedSearchConfigurations < ActiveRecord::Migration[5.2]
def up
add_column :advanced_search_configurations, :options, :jsonb, :null => true
end
def down
remove_column :advanced_search_configurations, :options
end
end
| 25.5 | 79 | 0.780392 |
f7a92266d5ef7075c113a92d32cd621e379f4fc0 | 1,422 | # Licensed to Elasticsearch B.V. under one or more contributor
# license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright
# ownership. Elasticsearch B.V. licenses this file to you under
# the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
require 'test_helper'
module Elasticsearch
  module Test
    # Unit test for the XPack Rollup "get rollup index caps" client helper:
    # asserts the helper issues the expected low-level HTTP request.
    class XPackGetRollupIndexCapsTest < Minitest::Test
      context "XPack Rollup: Get index caps" do
        # FakeClient/FakeResponse are stubs for the transport layer —
        # presumably defined in test_helper; confirm there.
        subject { FakeClient.new }

        should "perform correct request" do
          # Intercept perform_request and verify verb, endpoint, params and
          # body; the block must return true for the mocha expectation to match.
          subject.expects(:perform_request).with do |method, url, params, body|
            assert_equal 'GET', method
            assert_equal "foo/_rollup/data", url
            assert_equal Hash.new, params
            assert_nil body
            true
          end.returns(FakeResponse.new)

          subject.xpack.rollup.get_rollup_index_caps :index => 'foo'
        end
      end
    end
  end
end
| 34.682927 | 79 | 0.708158 |
# Sequel migration: creates the +tests+ join table linking participants to
# statements, with status flags and a uniqueness constraint per pair.
Sequel.migration do
  change do
    create_table :tests do
      # Keys — deleting a participant or statement cascades to its tests.
      primary_key :id
      foreign_key :participant_id, :participants, null: false, on_delete: :cascade
      foreign_key :statement_id, :statements, null: false, on_delete: :cascade

      # Data
      Boolean :statement_read, null: false, default: false
      Boolean :statement_shown, null: false, default: false
      Boolean :completed, null: false, default: false

      # Timestamps
      DateTime :created_at, null: false
      DateTime :updated_at, null: false

      # Index — each participant may have at most one test per statement.
      index [:participant_id, :statement_id], unique: true
      index [:statement_read]
      index [:statement_shown]
      index [:completed]
    end
  end
end
| 28.461538 | 84 | 0.647297 |
require "ios_localizer/version"
require 'uri'
require 'net/https'
require 'json'
require 'cgi'
require 'htmlentities'

module IosLocalizer
  # Helpers for talking to the Google Translate API and for parsing
  # Apple Localizable.strings files.
  class HelperMethods
    # Matches one .strings entry of the form "key" = "value"; capturing value.
    $regex = /^".*" = "(.*)";$/

    # Fetches +url+ over HTTPS and parses the response body as JSON.
    # WARNING(review): SSL certificate verification is disabled
    # (VERIFY_NONE), which permits man-in-the-middle attacks; this should
    # be VERIFY_PEER unless there is a documented reason not to.
    def getDataFromURL(url)
      uri = URI.parse(url.strip)
      http = Net::HTTP.new(uri.host, uri.port)
      http.use_ssl = true
      http.verify_mode = OpenSSL::SSL::VERIFY_NONE
      lr = http.request(Net::HTTP::Get.new(uri.request_uri))
      JSON.parse(lr.body)
    end

    def directory_exists?(directory)
      File.directory?(directory)
    end

    # Returns the value captured from a .strings line.
    # NOTE: callers must first check should_line_be_translated, otherwise
    # `match` returns nil and this raises NoMethodError.
    def extract_word(line)
      $regex.match(line)[1]
    end

    def should_line_be_translated(line)
      $regex.match(line) != nil
    end
  end

  # Regenerates the source Localizable.strings via genstrings, then writes a
  # translated Localizable.strings for every supported language that has an
  # existing <lang>.lproj directory under +proj_dir+ (minus +source+ and the
  # languages listed in +skip+).
  def IosLocalizer.localize(key, proj_dir, source, skip)
    h = HelperMethods.new
    source_strings_file = proj_dir + "/" + source + ".lproj/Localizable.strings"

    # Get languages supported by the translation API.
    lurl = 'https://www.googleapis.com/language/translate/v2/languages?key=' + key
    ldata = h.getDataFromURL(lurl)
    if ldata.has_key? "error"
      if ldata["error"]["errors"][0]["reason"] == "keyInvalid"
        puts "ERROR: Invalid API Key"
        exit(1) # was bare `exit` (status 0); error paths must exit non-zero
      else
        puts "ERROR: web service error"
        exit(1) # was bare `exit` (status 0); error paths must exit non-zero
      end
    end
    if ldata.count == 0
      puts "Invalid API key"
      exit(1)
    end
    languages = Array.new
    ldata["data"]["languages"].each do |language|
      unless language["language"].eql? source
        unless skip.include? language["language"]
          lproj_dir = proj_dir + "/" + language["language"] + ".lproj"
          if h.directory_exists?(lproj_dir)
            languages << language["language"]
          end
        end
      end
    end

    # Regenerate the source-language strings file from all .m sources.
    gen_strings_command = "find " + proj_dir + "/ -name *.m -print0 | xargs -0 genstrings -o " + proj_dir + "/" + source + ".lproj"
    system(gen_strings_command)

    # Collect the words that need to be translated.
    # genstrings emits UTF-16LE, hence the explicit external encoding.
    words = Array.new
    src = File.open(source_strings_file, "rb:UTF-16LE")
    while (line = src.gets)
      line = line.encode('UTF-8')
      if h.should_line_be_translated(line)
        words << h.extract_word(line)
      end
    end
    src.close # was leaked in the original

    # Prepare the translate query; each word becomes a &q= parameter and the
    # API returns translations in the same order.
    turl = "https://www.googleapis.com/language/translate/v2?key=" + key + "&source=" + source
    words.each do |word|
      turl += "&q=" + CGI::escape(word)
    end

    # Translate words and write one Localizable.strings per target language.
    coder = HTMLEntities.new
    languages.each do |lang|
      lang_strings_file = proj_dir + "/" + lang + ".lproj/Localizable.strings"
      dest = File.open(lang_strings_file, "w")
      translate_url = turl + "&target=" + lang
      tdata = h.getDataFromURL(translate_url)
      i = 0
      src = File.open(source_strings_file, "rb:UTF-16LE")
      while (line = src.gets)
        line = line.encode('UTF-8')
        if h.should_line_be_translated(line)
          extracted_word = (h.extract_word(line))
          # reverse/sub/reverse replaces the LAST occurrence of the source
          # value on the line (i.e. the quoted value, not the key, when both
          # are identical). Translations are HTML-entity decoded.
          line = line.reverse.sub(extracted_word.reverse, coder.decode(tdata["data"]["translations"][i]["translatedText"]).reverse).reverse
          dest.syswrite(line)
          i += 1
        else
          dest.syswrite(line)
        end
      end
      src.close  # both handles were leaked in the original
      dest.close
    end
  end
end
| 22.804511 | 134 | 0.66238 |
# Cookbook Name:: kernel-modules
# Author:: Jeremy MAURO <[email protected]>
#
# Copyright 2016, Criteo.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
require 'chefspec'
require 'chefspec/berkshelf'
Dir[File.join(__dir__, 'support/**/*.rb')].sort.each { |f| require f }
| 33.695652 | 74 | 0.741935 |
# frozen_string_literal: true
require 'spec_helper'
# The Stdlib::IP::Address::V6::CIDR data type requires Puppet's type system
# from 4.5.0 onward, so the whole spec is guarded on the running version.
if Puppet::Util::Package.versioncmp(Puppet.version, '4.5.0') >= 0
  describe 'Stdlib::IP::Address::V6::CIDR' do
    # Positive cases: IPv6 addresses carrying a /prefix suffix.
    describe 'accepts ipv6 addresses in cidr format' do
      [
        'FF01:0:0:0:0:0:0:101/32',
        'FF01::101/60',
        '::/0',
      ].each do |value|
        describe value.inspect do
          it { is_expected.to allow_value(value) }
        end
      end
    end

    # Negative cases: bare IPv6 addresses (no prefix), IPv4 forms, and
    # IPv4 CIDR notation must all be rejected.
    describe 'rejects other values' do
      [
        'FEDC:BA98:7654:3210:FEDC:BA98:7654:3210',
        'FF01:0:0:0:0:0:0:101',
        'FF01::101',
        '12AB::CD30:192.168.0.1',
        '127.0.0.1',
        '10.1.240.4/24',
      ].each do |value|
        describe value.inspect do
          it { is_expected.not_to allow_value(value) }
        end
      end
    end
  end
end
| 23.685714 | 65 | 0.547648 |
require 'test_helper'

# Functional tests for ShowsController CRUD actions, run as a signed-in user.
# NOTE(review): relies on the legacy `assigns` helper, which moved to the
# `rails-controller-testing` gem in Rails 5 — confirm it is bundled.
class ShowsControllerTest < ActionController::TestCase
  setup do
    @user = users(:one)
    sign_in @user
    @show = shows(:hamlet)
  end

  test "should get index" do
    get :index
    assert_response :success
    assert_not_nil assigns(:shows)
  end

  test "should get new" do
    get :new
    assert_response :success
  end

  test "should create show" do
    assert_difference('Show.count') do
      post :create, show: { description: @show.description, title: @show.title }
    end

    assert_redirected_to show_path(assigns(:show))
  end

  test "should show show" do
    get :show, id: @show
    assert_response :success
  end

  test "should get edit" do
    get :edit, id: @show
    assert_response :success
  end

  test "should update show" do
    put :update, id: @show, show: { description: @show.description, title: @show.title }
    assert_redirected_to show_path(assigns(:show))
  end

  test "should destroy show" do
    assert_difference('Show.count', -1) do
      delete :destroy, id: @show
    end

    assert_redirected_to shows_path
  end
end
| 21.038462 | 88 | 0.677331 |
# frozen_string_literal: true
module Gitlab
  module Ci
    module Build
      # only/except ref policy: decides whether a pipeline matches a list of
      # ref patterns. A pattern may be 'branches'/'tags', a literal ref or
      # source name, or a regexp, each optionally suffixed with
      # '@group/project' to scope it to one project.
      module Policy
        class Refs < Policy::Specification
          def initialize(refs)
            # Array() accepts a single pattern or a list transparently.
            @patterns = Array(refs)
          end

          # True if any pattern matches both the pipeline's project path
          # (the optional part after '@') and the ref/source pattern itself.
          def satisfied_by?(pipeline, context = nil)
            @patterns.any? do |pattern|
              # Split at most once: only the first '@' separates pattern
              # from project path.
              pattern, path = pattern.split('@', 2)

              matches_path?(path, pipeline) &&
                matches_pattern?(pattern, pipeline)
            end
          end

          private

          # No '@project/path' suffix means the pattern applies everywhere.
          def matches_path?(path, pipeline)
            return true unless path

            pipeline.project_full_path == path
          end

          # Checks 'branches'/'tags' keywords, then the pipeline source name
          # (singular or pluralized), then regexp/literal ref matching.
          # Implicitly returns nil (falsey) for non-branch/non-tag pipelines
          # that fall through to the last branch.
          def matches_pattern?(pattern, pipeline)
            return true if pipeline.tag? && pattern == 'tags'
            return true if pipeline.branch? && pattern == 'branches'
            return true if sanitized_source_name(pipeline) == pattern
            return true if sanitized_source_name(pipeline)&.pluralize == pattern

            # patterns can be matched only when branch or tag is used
            # the pattern matching does not work for merge requests pipelines
            if pipeline.branch? || pipeline.tag?
              # fallback: true treats a non-regexp pattern as a literal ref.
              regexp = Gitlab::UntrustedRegexp::RubySyntax
                .fabricate(pattern, fallback: true, project: pipeline.project)

              if regexp
                regexp.match?(pipeline.ref)
              else
                pattern == pipeline.ref
              end
            end
          end

          # NOTE(review): memoized per policy instance — assumes one policy
          # object is evaluated against a single pipeline; confirm callers.
          def sanitized_source_name(pipeline)
            @sanitized_source_name ||= pipeline&.source&.delete_suffix('_event')
          end
        end
      end
    end
  end
end
| 29.526316 | 80 | 0.562686 |
module Naturally
  # Gem version. Frozen so the constant cannot be mutated in place.
  VERSION = '2.2.0'.freeze
end
| 11.4 | 19 | 0.666667 |
# frozen_string_literal: true
ENV['RACK_ENV'] ||= 'test'

# SimpleCov must start before application code is loaded for coverage to work.
require 'simplecov'
SimpleCov.start {
  add_filter '/spec/'
  add_filter '/test_helpers/'
}

require 'bundler/setup'
require 'rspec/expectations' # (was required twice; duplicate removed)
require 'pry-byebug'
require 'capybara'
require 'capybara/rspec'
require 'capybara/spec/spec_helper'
require 'capybara_test_helpers/rspec'

# Load all spec support files, then the shared base test helper.
Dir[File.expand_path('spec/support/**/*.rb')].sort.each { |f| require f }
require File.expand_path('test_helpers/base_test_helper.rb')

RSpec.configure do |config|
  Capybara::SpecHelper.configure(config)

  # Avoid the reset in Capybara::SpecHelper
  config.before(:each) { Capybara.default_selector = :css }
  config.after(:each) { Capybara.default_selector = :css }

  # Example: Include commonly used test helpers by default on every feature spec.
  config.include(DefaultTestHelpers, type: :feature)
  config.include(SupportFileHelpers)

  # Enable flags like --only-failures and --next-failure
  config.example_status_persistence_file_path = '.rspec_status'

  # Disable RSpec exposing methods globally on `Module` and `main`
  config.disable_monkey_patching!

  config.expect_with :rspec do |c|
    c.syntax = :expect
  end

  config.mock_with :rspec do |mocks|
    mocks.verify_partial_doubles = true
  end
end
| 25.411765 | 81 | 0.753086 |
=begin
#NSX-T Manager API
#VMware NSX-T Manager REST API
OpenAPI spec version: 2.5.1.0.0
Generated by: https://github.com/swagger-api/swagger-codegen.git
Swagger Codegen version: 2.4.19
=end
require 'date'
module NSXT
  # Traffic statistics for a logical switch.
  # NOTE: generated by swagger-codegen (see file header); hand edits are
  # typically overwritten on regeneration.
  class L2VPNPerLSTrafficStatistics
    # Total number of outgoing packets.
    attr_accessor :packets_out
    # Total number of incoming packets dropped.
    attr_accessor :packets_receive_error
    # Total number of incoming bytes.
    attr_accessor :bytes_in
    # Total number of incoming Broadcast, Unknown unicast and Multicast (BUM) packets.
    attr_accessor :bum_packets_in
    # Total number of outgoing Broadcast, Unknown unicast and Multicast (BUM) bytes.
    attr_accessor :bum_bytes_out
    # Logical switch
    attr_accessor :logical_switch
    # Total number of outgoing bytes.
    attr_accessor :bytes_out
    # Total number of packets dropped while sending for any reason.
    attr_accessor :packets_sent_error
    # Total number of outgoing Broadcast, Unknown unicast and Multicast (BUM) packets.
    attr_accessor :bum_packets_out
    # Total number of incoming packets.
    attr_accessor :packets_in
    # Total number of incoming Broadcast, Unknown unicast and Multicast (BUM) bytes.
    attr_accessor :bum_bytes_in
    # Attribute mapping from ruby-style variable name to JSON key.
    def self.attribute_map
      {
        :'packets_out' => :'packets_out',
        :'packets_receive_error' => :'packets_receive_error',
        :'bytes_in' => :'bytes_in',
        :'bum_packets_in' => :'bum_packets_in',
        :'bum_bytes_out' => :'bum_bytes_out',
        :'logical_switch' => :'logical_switch',
        :'bytes_out' => :'bytes_out',
        :'packets_sent_error' => :'packets_sent_error',
        :'bum_packets_out' => :'bum_packets_out',
        :'packets_in' => :'packets_in',
        :'bum_bytes_in' => :'bum_bytes_in'
      }
    end
    # Attribute type mapping.
    def self.swagger_types
      {
        :'packets_out' => :'Integer',
        :'packets_receive_error' => :'Integer',
        :'bytes_in' => :'Integer',
        :'bum_packets_in' => :'Integer',
        :'bum_bytes_out' => :'Integer',
        :'logical_switch' => :'ResourceReference',
        :'bytes_out' => :'Integer',
        :'packets_sent_error' => :'Integer',
        :'bum_packets_out' => :'Integer',
        :'packets_in' => :'Integer',
        :'bum_bytes_in' => :'Integer'
      }
    end
    # Initializes the object
    # @param [Hash] attributes Model attributes in the form of hash
    def initialize(attributes = {})
      return unless attributes.is_a?(Hash)
      # convert string to symbol for hash key
      attributes = attributes.each_with_object({}) { |(k, v), h| h[k.to_sym] = v }
      if attributes.has_key?(:'packets_out')
        self.packets_out = attributes[:'packets_out']
      end
      if attributes.has_key?(:'packets_receive_error')
        self.packets_receive_error = attributes[:'packets_receive_error']
      end
      if attributes.has_key?(:'bytes_in')
        self.bytes_in = attributes[:'bytes_in']
      end
      if attributes.has_key?(:'bum_packets_in')
        self.bum_packets_in = attributes[:'bum_packets_in']
      end
      if attributes.has_key?(:'bum_bytes_out')
        self.bum_bytes_out = attributes[:'bum_bytes_out']
      end
      if attributes.has_key?(:'logical_switch')
        self.logical_switch = attributes[:'logical_switch']
      end
      if attributes.has_key?(:'bytes_out')
        self.bytes_out = attributes[:'bytes_out']
      end
      if attributes.has_key?(:'packets_sent_error')
        self.packets_sent_error = attributes[:'packets_sent_error']
      end
      if attributes.has_key?(:'bum_packets_out')
        self.bum_packets_out = attributes[:'bum_packets_out']
      end
      if attributes.has_key?(:'packets_in')
        self.packets_in = attributes[:'packets_in']
      end
      if attributes.has_key?(:'bum_bytes_in')
        self.bum_bytes_in = attributes[:'bum_bytes_in']
      end
    end
    # Show invalid properties with the reasons. Usually used together with valid?
    # @return Array for valid properties with the reasons
    def list_invalid_properties
      # No constrained fields in this model, so the list is always empty.
      invalid_properties = Array.new
      invalid_properties
    end
    # Check to see if all the properties in the model are valid
    # @return true if the model is valid
    def valid?
      # Always true: the schema declares no required fields or constraints.
      true
    end
    # Checks equality by comparing each attribute.
    # @param [Object] Object to be compared
    def ==(o)
      return true if self.equal?(o)
      self.class == o.class &&
          packets_out == o.packets_out &&
          packets_receive_error == o.packets_receive_error &&
          bytes_in == o.bytes_in &&
          bum_packets_in == o.bum_packets_in &&
          bum_bytes_out == o.bum_bytes_out &&
          logical_switch == o.logical_switch &&
          bytes_out == o.bytes_out &&
          packets_sent_error == o.packets_sent_error &&
          bum_packets_out == o.bum_packets_out &&
          packets_in == o.packets_in &&
          bum_bytes_in == o.bum_bytes_in
    end
    # @see the `==` method
    # @param [Object] Object to be compared
    def eql?(o)
      self == o
    end
    # Calculates hash code according to all attributes.
    # @return [Fixnum] Hash code
    def hash
      [packets_out, packets_receive_error, bytes_in, bum_packets_in, bum_bytes_out, logical_switch, bytes_out, packets_sent_error, bum_packets_out, packets_in, bum_bytes_in].hash
    end
    # Builds the object from hash
    # @param [Hash] attributes Model attributes in the form of hash
    # @return [Object] Returns the model itself
    def build_from_hash(attributes)
      return nil unless attributes.is_a?(Hash)
      self.class.swagger_types.each_pair do |key, type|
        if type =~ /\AArray<(.*)>/i
          # check to ensure the input is an array given that the attribute
          # is documented as an array but the input is not
          if attributes[self.class.attribute_map[key]].is_a?(Array)
            self.send("#{key}=", attributes[self.class.attribute_map[key]].map { |v| _deserialize($1, v) })
          end
        elsif !attributes[self.class.attribute_map[key]].nil?
          self.send("#{key}=", _deserialize(type, attributes[self.class.attribute_map[key]]))
        end # or else data not found in attributes(hash), not an issue as the data can be optional
      end
      self
    end
    # Deserializes the data based on type
    # @param string type Data type
    # @param string value Value to be deserialized
    # @return [Object] Deserialized data
    def _deserialize(type, value)
      case type.to_sym
      when :DateTime
        DateTime.parse(value)
      when :Date
        Date.parse(value)
      when :String
        value.to_s
      when :Integer
        value.to_i
      when :Float
        value.to_f
      when :BOOLEAN
        # Accepts common truthy spellings; anything else is false.
        if value.to_s =~ /\A(true|t|yes|y|1)\z/i
          true
        else
          false
        end
      when :Object
        # generic object (usually a Hash), return directly
        value
      when /\AArray<(?<inner_type>.+)>\z/
        inner_type = Regexp.last_match[:inner_type]
        value.map { |v| _deserialize(inner_type, v) }
      when /\AHash<(?<k_type>.+?), (?<v_type>.+)>\z/
        k_type = Regexp.last_match[:k_type]
        v_type = Regexp.last_match[:v_type]
        {}.tap do |hash|
          value.each do |k, v|
            hash[_deserialize(k_type, k)] = _deserialize(v_type, v)
          end
        end
      else # model
        # Any other type name is resolved as a model class in this module.
        temp_model = NSXT.const_get(type).new
        temp_model.build_from_hash(value)
      end
    end
    # Returns the string representation of the object
    # @return [String] String presentation of the object
    def to_s
      to_hash.to_s
    end
    # to_body is an alias to to_hash (backward compatibility)
    # @return [Hash] Returns the object in the form of hash
    def to_body
      to_hash
    end
    # Returns the object in the form of hash
    # @return [Hash] Returns the object in the form of hash
    def to_hash
      hash = {}
      self.class.attribute_map.each_pair do |attr, param|
        value = self.send(attr)
        next if value.nil?
        hash[param] = _to_hash(value)
      end
      hash
    end
    # Outputs non-array value in the form of hash
    # For object, use to_hash. Otherwise, just return the value
    # @param [Object] value Any valid value
    # @return [Hash] Returns the value in the form of hash
    def _to_hash(value)
      if value.is_a?(Array)
        value.compact.map { |v| _to_hash(v) }
      elsif value.is_a?(Hash)
        {}.tap do |hash|
          value.each { |k, v| hash[k] = _to_hash(v) }
        end
      elsif value.respond_to? :to_hash
        value.to_hash
      else
        value
      end
    end
  end
end
| 30.989547 | 178 | 0.632899 |
module Cwl
  # Validates that an attribute's value is exactly the string 'Workflow';
  # any other value is reported as a :format error.
  class ClassValidator < ActiveModel::EachValidator
    def validate_each(record, attribute, value)
      return if value == 'Workflow'

      record.errors.add(attribute, :format)
    end
  end
end
| 23.222222 | 51 | 0.708134 |
# CRUD controller for Contact records, responding in HTML and JSON.
# Write actions are gated by the can_write_entries? filter.
class ContactsController < ApplicationController
  before_filter :can_write_entries?, only: [:new, :create, :edit, :update]
  before_filter :find_contacts, only: [:index]
  before_filter :find_contact, only: [:show, :edit, :update]

  respond_to :html, :json

  # GET /contacts
  # GET /contacts.json
  def index
  end

  # GET /contacts/1
  # GET /contacts/1.json
  def show
  end

  # GET /contacts/new
  # GET /contacts/new.json
  def new
    @contact = Contact.new
  end

  # GET /contacts/1/edit
  def edit
  end

  # POST /contacts
  # POST /contacts.json
  def create
    @contact = Contact.new(params[:contact])
    flash[:notice] = 'Contact was successfully created.' if @contact.save
    respond_with @contact, location: contacts_path
  end

  # PUT /contacts/1
  # PUT /contacts/1.json
  def update
    flash[:notice] = 'Contact was successfully updated.' if @contact.update_attributes(params[:contact])
    respond_with @contact, location: contacts_path
  end

  protected

  # Ransack search over contacts, paginated.
  def find_contacts
    @q = Contact.search params[:q]
    @contacts = @q.result.page(params[:page])
  end

  def find_contact
    @contact = Contact.find(params[:id])
  end
end
| 21.685185 | 104 | 0.682323 |
require 'prime'

# Modular exponentiation: (b**p) % m for non-negative integer p.
# Delegates to Integer#pow(exp, mod), Ruby's built-in fast modular
# exponentiation (>= 2.4), instead of the original hand-rolled
# square-and-multiply over the binary digits of p.
# NOTE: like the original, only defined for integer, non-negative p.
def powerMod(b, p, m)
  b.pow(p, m)
end
# Multiplicative order of a modulo p**k, where p is prime and
# gcd(a, p) == 1. Starts from t = phi(p**k) and, for each prime power
# q**e dividing t, restores just enough factors of q to reach 1.
# Uses Integer#pow for modular exponentiation and `each` instead of the
# local-leaking `for` loop of the original.
def multOrder_(a, p, k)
  pk = p**k
  t = (p - 1) * p**(k - 1) # Euler's totient of p**k
  r = 1
  t.prime_division.each do |q, e|
    x = a.pow(t / q**e, pk)
    until x == 1
      r *= q
      x = x.pow(q, pk)
    end
  end
  r
end
# Multiplicative order of a modulo m (gcd(a, m) == 1): the least positive
# n with a**n ≡ 1 (mod m). Computed as the lcm of the orders modulo each
# prime-power factor of m (via multOrder_).
def multOrder(a, m)
  order = 1
  m.prime_division.each do |prime, exponent|
    order = order.lcm(multOrder_(a, prime, exponent))
  end
  order
end
# Demo / self-check for the routines above
# (Rosetta Code "Multiplicative order" task).
puts multOrder(37, 1000)
b = 10**20 - 1
puts multOrder(2, b)
puts multOrder(17, b)
b = 100001
# Compute the order of 54 mod b once; the original recomputed it three times.
ord = multOrder(54, b)
puts ord
puts powerMod(54, ord, b) # sanity check: must print 1
# Verify minimality: no smaller positive exponent may give 1.
if (1...ord).any? { |r| powerMod(54, r, b) == 1 }
  puts 'Exists a power r < 9090 where powerMod(54,r,b)==1'
else
  puts 'Everything checks.'
end
| 19.333333 | 59 | 0.584975 |
Rails.application.routes.draw do
  get 'pages/index'
  post 'pages/parse_gml'
  # For details on the DSL available within this file, see http://guides.rubyonrails.org/routing.html
  root 'pages#index'

  # JSON-only API; numeric-id constraint applies to all nested routes.
  defaults format: :json do
    constraints(id: /\d+/) do
      get 'topologies/current', to: 'topologies#current'
      resources :topologies, except: [:new, :edit] do
        get 'nodes/count', to: 'nodes#count'
        resources :nodes, except: [:new, :edit]
        get 'nodes/:id/links', to: 'nodes#show_links'
      end
    end
  end
end
| 30.388889 | 101 | 0.652651 |
# View helper namespace for skill views. Intentionally empty; Rails
# requires the module to exist for the Skills controller/views pair.
module SkillHelper
end
| 7.666667 | 18 | 0.869565 |
39c355193d4cd30cf069483cc358fcaa098215a9 | 3,746 | # Ruby bindings for brotli library.
# Copyright (c) 2019 AUTHORS, MIT License.
require "brs/file"
require_relative "common"
require_relative "minitest"
require_relative "option"
require_relative "validation"
module BRS
  module Test
    # Round-trip tests for the BRS::File compress/decompress API:
    # argument validation, option-matrix round trips, and large payloads.
    class File < Minitest::Test
      Target = BRS::File

      SOURCE_PATH = Common::SOURCE_PATH
      ARCHIVE_PATH = Common::ARCHIVE_PATH
      TEXTS = Common::TEXTS
      LARGE_TEXTS = Common::LARGE_TEXTS

      # Buffer-length option names accepted by both directions; the mapping
      # swaps source/destination when deriving decompressor options from
      # compressor options.
      BUFFER_LENGTH_NAMES = %i[source_buffer_length destination_buffer_length].freeze
      BUFFER_LENGTH_MAPPING = {
        :source_buffer_length => :destination_buffer_length,
        :destination_buffer_length => :source_buffer_length
      }
      .freeze

      # Invalid paths and invalid option hashes must raise ValidateError
      # in every argument position.
      def test_invalid_arguments
        Validation::INVALID_STRINGS.each do |invalid_path|
          assert_raises ValidateError do
            Target.compress invalid_path, ARCHIVE_PATH
          end
          assert_raises ValidateError do
            Target.compress SOURCE_PATH, invalid_path
          end
          assert_raises ValidateError do
            Target.decompress invalid_path, SOURCE_PATH
          end
          assert_raises ValidateError do
            Target.decompress ARCHIVE_PATH, invalid_path
          end
        end
        get_invalid_compressor_options do |invalid_options|
          assert_raises ValidateError do
            Target.compress SOURCE_PATH, ARCHIVE_PATH, invalid_options
          end
        end
        get_invalid_decompressor_options do |invalid_options|
          assert_raises ValidateError do
            Target.decompress ARCHIVE_PATH, SOURCE_PATH, invalid_options
          end
        end
      end

      # Round-trips every sample text through every compatible
      # compressor/decompressor option pair, in parallel workers
      # (worker_index keeps per-worker temp paths separate).
      def test_texts
        parallel_compressor_options do |compressor_options, worker_index|
          source_path = Common.get_path SOURCE_PATH, worker_index
          archive_path = Common.get_path ARCHIVE_PATH, worker_index
          TEXTS.each do |text|
            ::File.write source_path, text, :mode => "wb"
            Target.compress source_path, archive_path, compressor_options
            get_compatible_decompressor_options compressor_options do |decompressor_options|
              Target.decompress archive_path, source_path, decompressor_options
              decompressed_text = ::File.read source_path, :mode => "rb"
              # Restore the original encoding before comparing; the file was
              # read back as binary.
              decompressed_text.force_encoding text.encoding
              assert_equal text, decompressed_text
            end
          end
        end
      end

      # Round-trips large payloads with default options only.
      def test_large_texts
        Common.parallel LARGE_TEXTS do |text, worker_index|
          source_path = Common.get_path SOURCE_PATH, worker_index
          archive_path = Common.get_path ARCHIVE_PATH, worker_index
          ::File.write source_path, text, :mode => "wb"
          Target.compress source_path, archive_path
          Target.decompress archive_path, source_path
          decompressed_text = ::File.read source_path, :mode => "rb"
          decompressed_text.force_encoding text.encoding
          assert_equal text, decompressed_text
        end
      end

      # ----- option-generation helpers (delegating to shared test support)
      def get_invalid_compressor_options(&block)
        Option.get_invalid_compressor_options BUFFER_LENGTH_NAMES, &block
      end

      def get_invalid_decompressor_options(&block)
        Option.get_invalid_decompressor_options BUFFER_LENGTH_NAMES, &block
      end

      def parallel_compressor_options(&block)
        Common.parallel_options Option.get_compressor_options_generator(BUFFER_LENGTH_NAMES), &block
      end

      def get_compatible_decompressor_options(compressor_options, &block)
        Option.get_compatible_decompressor_options compressor_options, BUFFER_LENGTH_MAPPING, &block
      end
    end

    Minitest << File
  end
end
| 31.478992 | 100 | 0.681794 |
# Join model granting a Role to a User.
class RoleGrant < ApplicationRecord
  belongs_to :role
  # touch: true bumps the user's updated_at whenever the grant changes,
  # which invalidates user-level caches keyed on that timestamp.
  belongs_to :user, touch: true
end
| 19 | 36 | 0.747368 |
# Gem specification for the Customer Lobby API client.
lib = File.expand_path('../lib', __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require 'lobbyist/version'

spec = Gem::Specification.new do |s|
  s.name        = 'lobbyist-ruby'
  s.version     = Lobbyist::Version
  s.summary     = 'Ruby client library for the Customer Lobby API.'
  s.description = 'Access the Customer Lobby API easily with this client. See documentation at ###TBD###'
  s.authors     = ['David Lains', 'Shiv Indap']
  s.email       = ['[email protected]', '[email protected]']
  s.homepage    = 'https://github.com/customerlobby/lobbyist-ruby'
  s.licenses    = ['MIT']
  s.platform    = Gem::Platform::RUBY

  s.required_ruby_version = '>=1.9'
  s.require_paths = ['lib']

  s.files = `git ls-files`.split("\n")
  s.test_files = `git ls-files -- spec/*`.split("\n")
  # NOTE(review): has_rdoc is deprecated and ignored by modern RubyGems;
  # kept only for compatibility with older toolchains.
  s.has_rdoc = false

  s.add_dependency('faraday')
  s.add_dependency('faraday_middleware')
  s.add_dependency('multi_json')
  s.add_dependency('activesupport')
  s.add_dependency('hashie')

  s.add_development_dependency('vcr')
  s.add_development_dependency('bundler')
end
| 41.1 | 115 | 0.609084 |
# Creates comments: each comment belongs to a post and a user, with a
# composite index to speed up per-post/per-user lookups.
class CreateComments < ActiveRecord::Migration[5.0]
  def change
    create_table :comments do |t|
      t.references :post, foreign_key: true, null: false
      t.references :user, foreign_key: true, null: false
      t.text :content, null: false

      t.timestamps
    end
    add_index :comments, [:post_id, :user_id]
  end
end
| 27.583333 | 56 | 0.676737 |
# Creates versions of components with an hstore dependency map.
# NOTE(review): the hstore column requires the PostgreSQL hstore
# extension to be enabled — confirm an earlier migration does so.
class CreateVersions < ActiveRecord::Migration
  def change
    create_table :versions do |t|
      t.references :component, index: true
      t.string :string, index: true
      t.hstore :dependencies

      t.timestamps
    end
  end
end
| 20 | 46 | 0.675 |
01913ee32223a5b5a5bde2a802ba72dce8834cc2 | 1,496 | require 'spec_helper'
# Generators are not automatically loaded by Rails
require 'generators/rspec/model/model_generator'
describe Rspec::Generators::ModelGenerator, :type => :generator do
# Tell the generator where to put its output (what it thinks of as Rails.root)
destination File.expand_path("../../../../../tmp", __FILE__)
before { prepare_destination }
it 'runs both the model and fixture tasks' do
gen = generator %w(posts)
expect(gen).to receive :create_model_spec
expect(gen).to receive :create_fixture_file
capture(:stdout) { gen.invoke_all }
end
describe 'the generated files' do
describe 'with fixtures' do
before do
run_generator %w(posts --fixture)
end
describe 'the spec' do
subject { file('spec/models/posts_spec.rb') }
it { is_expected.to exist }
it { is_expected.to contain(/require 'rails_helper'/) }
it { is_expected.to contain(/^RSpec.describe Posts, :type => :model/) }
end
describe 'the fixtures' do
subject { file('spec/fixtures/posts.yml') }
it { is_expected.to contain(Regexp.new('# Read about fixtures at http://api.rubyonrails.org/classes/ActiveRecord/FixtureSet.html')) }
end
end
describe 'without fixtures' do
before do
run_generator %w(posts)
end
describe 'the fixtures' do
subject { file('spec/fixtures/posts.yml') }
it { is_expected.not_to exist }
end
end
end
end
| 28.226415 | 141 | 0.657754 |
# depends on: class.rb array.rb
##
# A wrapper for a calling a function in a shared library that has been
# attached via rb_define_method().
#
# The primitive slot for a NativeMethod points to the nmethod_call primitive
# which dispatches to the underlying C function.
class NativeMethod
  # A native method has no bytecode-level metadata, so every
  # introspection accessor reports "nothing".
  def lines;      nil; end
  def exceptions; nil; end
  def literals;   nil; end

  # With no bytecode there is no IP-to-line mapping: every instruction
  # pointer resolves to line 0.
  def line_from_ip(i)
    0
  end
end
##
# A linked list that details the static, lexical scope the method was created
# in.
#
# You can access it this way:
#
# MethodContext.current.method.staticscope
#
# Here is a simple example:
#
# module Fruits
# class Pineapple
# attr_reader :initialize_scope
#
# def initialize(weight)
# @initialize_scope = MethodContext.current.method.staticscope
# @weight = weight
# end
# end
# end
#
# Static scope members are shown below:
#
# irb(main):> pineapple.initialize_scope.script
# => nil
# irb(main):> pineapple.initialize_scope.parent
# => #<StaticScope:0x1c9>
# irb(main):> pineapple.initialize_scope.module
# => Fruits::Pineapple
# irb(main):> pineapple.initialize_scope.parent.module
# => Fruits
# irb(main):> pineapple.initialize_scope.parent.parent.module
# => Object
# irb(main):> pineapple.initialize_scope.parent.parent.parent.module
# => Object
class StaticScope
  # Rubinius bootstrap primitive: maps these instance variables onto
  # fixed object slots so the VM can access them by index.
  ivar_as_index :__ivars__ => 0, :module => 1, :parent => 2
  # @param mod [Module] module/class this scope is enclosed in
  # @param par [StaticScope, nil] enclosing scope, nil at the top level
  def initialize(mod, par=nil)
    @module = mod
    @parent = par
  end

  # Writer is used as-is; the reader is re-defined explicitly below.
  attr_accessor :script

  # Source code of this scope.
  def script
    @script
  end

  # Module or class this lexical scope is enclosed in.
  def module
    @module
  end

  # Static scope object this scope is enclosed in (nil at the top level).
  def parent
    @parent
  end

  def inspect
    "#<#{self.class.name}:0x#{self.object_id.to_s(16)} parent=#{@parent} module=#{@module}>"
  end

  def to_s
    self.inspect
  end
end
##
# CompiledMethod represents source code method compiled into VM bytecodes.
# Its instruction set is then executed by Shotgun's abstraction of CPU.
# CompiledMethods are not just sets of instructions though. They carry a lot
# of information about method: its lexical scope (static scope), name, file
# it has been defined in and so forth.
class CompiledMethod
# TODO: Delete/reuse cache (field 14) field from C structure
ivar_as_index :__ivars__ => 0,
:primitive => 1,
:required => 2,
:serial => 3,
:bytecodes => 4,
:name => 5,
:file => 6,
:local_count => 7,
:literals => 8,
:args => 9,
:local_names => 10,
:exceptions => 11,
:lines => 12,
:path => 13,
:metadata_container => 15,
:compiled => 16,
:staticscope => 17
def __ivars__ ; @__ivars__ ; end
##
# nil if the method does not have a primitive, otherwise the name of the
# primitive to run.
def primitive ; @primitive ; end
# number of arguments required by method
def required ; @required ; end
# Version of method: an incrementing integer.
# When you redefine method via re-opening
# a class this number is increased.
#
# Kernel methods have serial of 0
# %99.9 of the time.
def serial ; @serial ; end
# instructions set that VM executes
# instance of InstructionSequence
def bytecodes ; @bytecodes ; end
# method name as Symbol
def name ; @name ; end
# file in which this method has been defined
def file ; @file ; end
# number of local variables method uses
# note that locals are stored in slots
# in the context this CompiledMethod
# is executed in.
def local_count; @local_count; end
# literals tuple stores literals from
# source code like string literals and
# some extra stuff like SendSites,
# RegExp objects created from
# regexp literals and CompiledMethods
# of inner methods.
def literals ; @literals ; end
# Tuple holding the arguments defined on a method.
# Consists of 3 values:
# - a tuple of symbols naming required args (or nil if none)
# - a tuple of symbols naming optional args (or nil if none)
# - the symbol for any splat arg (or nil if none)
def args ; @args ; end
# Tuple holding the symbols for all local variable names used in the method.
def local_names; @local_names; end
# Tuple of tuples. Inner tuples contain
# low IP, high IP and IP of exception
# handler.
#
# When exception is raised this tuple is
# looked up by VM using context IP:
#
# Tuple which low/high IP fit in context
# IP is picked up and handling continues.
#
# TODO: double check this statement.
def exceptions ; @exceptions ; end
# Tuple of Tuples. Each inner Tuple
# stores the following information:
#
# low IP, high IP and line number as integer.
def lines ; @lines ; end
# Holds the path of a script CompiledMethod created using eval.
# Required for the proper functioning of __FILE__ under eval.
def path ; @path ; end
# Separate object for storing metadata; this way
# the metadata can change without changes to the
# CM itself.
def metadata_container ; @metadata_container ; end
# ByteArray of pointers to optcodes.
# This is only populated when CompiledMethod
# is loaded into VM and platform specific.
#
# You can think of it as of internal
# bytecode representation optimized
# for platform Rubinius runs on.
def compiled ; @compiled ; end
# lexical scope of method in source
# instance of StaticScope
def staticscope; @staticscope; end
##
# This is runtime hints, added to the method by the VM to indicate how it's
# being used.
attr_accessor :hints
def inspect
"#<#{self.class.name}:0x#{self.object_id.to_s(16)} name=#{@name} file=#{@file}>"
end
def from_string(bc, lcls, req)
@bytecodes = bc
@primitive = -1
@local_count = lcls
@literals = Tuple.new(0)
@exceptions = nil
@lines = nil
@file = nil
@name = nil
@path = nil
@required = req
return self
end
def self.from_bytecodes bytecodes, arg_count, local_count, literals, exceptions=nil, lines=nil
c = CompiledMethod.new
c.bytecodes = InstructionSequence::Encoder.new.encode_stream bytecodes
c.primitive = false
c.local_count = local_count
c.required = arg_count
c.literals = literals
c.lines = lines || Tuple[Tuple[0, bytecodes.size, 0]]
c.exceptions = exceptions || []
c
end
# Copy the lexical scope from +other+ onto this method; when +other+ has
# none, fall back to a fresh scope rooted at Object.
def inherit_scope(other)
  inherited = other.staticscope
  @staticscope = inherited || StaticScope.new(Object)
end

# Assign the lexical scope, refusing anything that is not a StaticScope.
def staticscope=(val)
  unless val.kind_of?(StaticScope)
    raise TypeError, "not a static scope: #{val.inspect}"
  end
  @staticscope = val
end
# Plain attribute writers used by the compiler/loader while assembling a
# CompiledMethod. No validation is performed here.
def exceptions=(tup)
  @exceptions = tup
end
def local_count=(val)
  @local_count = val
end
def required=(val)
  @required = val
end
def literals=(tup)
  @literals = tup
end
def args=(tup)
  @args = tup
end
def file=(val)
  @file = val
end
def name=(val)
  @name = val
end
def lines=(val)
  @lines = val
end
def path=(val)
  @path = val
end
def primitive=(idx)
  @primitive = idx
end
def serial=(ser)
  @serial = ser
end
def metadata_container=(tup)
  @metadata_container = tup
end
# NOTE(review): duplicate definition — args= is already defined above with
# an identical body; this redefinition is harmless but one of the two
# should be removed.
def args=(ary)
  @args = ary
end
# Record the local variable names for this method.
#
# Accepts nil (no-op) or a Tuple whose entries are all Symbols; anything
# else raises ArgumentError.
def local_names=(names)
  return if names.nil?
  raise ArgumentError, "only accepts a Tuple" unless names.kind_of?(Tuple)
  names.each do |entry|
    unless entry.kind_of?(Symbol)
      raise ArgumentError, "must be a tuple of symbols: #{entry.inspect}"
    end
  end
  @local_names = names
end
# Execute this method directly against receiver +recv+ with +mod+ as the
# defining module, +args+ (responds to #total) as the argument list and an
# optional block +prc+.
#
# NOTE(review): drops into Rubinius.asm to build an activate_method call
# frame by hand; the operand push order below mirrors what the VM expects —
# confirm against the instruction-set docs before changing anything here.
def activate(recv, mod, args, locals=nil, &prc)
  sz = args.total
  if prc
    block = prc.block
  else
    block = nil
  end
  out = Rubinius.asm(args, block, locals, sz, mod, recv) do |a,b,l,s,m,r|
    run a
    push_array
    run b
    run l
    run s
    run m
    push :self
    run r
    activate_method 0
  end
  return out
end
##
# Registry mapping filenames (as per $" / $LOADED_FEATURES) to the script
# CompiledMethod loaded from them. Lazily initialized on first access and
# memoized, so repeated calls return the same Hash.
def self.scripts
  @scripts = {} unless @scripts
  @scripts
end
# Helper function for searching for a CM given a file name; applies similar
# search and path expansion rules as load/require, so that the full path to
# the file need not be specified.
#
# Returns the matching script CompiledMethod, or nil when no registry entry
# corresponds to +filename+.
def self.script_for_file(filename)
  # Fast path: the name is already a key in the registry.
  if cm = self.scripts[filename]
    return cm
  end
  # Qualified names: ./ ../ ~/ /
  if filename =~ %r{\A(?:(\.\.?)|(~))?/}
    if $2 # ~ — expand against the user's home directory
      filename.slice! '~/'
      # FIX: the interpolation here was corrupted to literal text; restore
      # the filename lookup under $HOME.
      return scripts["#{ENV['HOME']}/#{filename}"]
    else # . or .. — expand relative to the current working directory
      return scripts["#{File.expand_path filename}"]
    end
  # Unqualified: try each $LOAD_PATH entry in order, like require does.
  else
    scripts = self.scripts
    $LOAD_PATH.each do |dir|
      # FIX: same corruption as above — look up "<dir>/<filename>".
      if cm = scripts["#{dir}/#{filename}"]
        return cm
      end
    end
  end
  nil
end
# Marker object attached to a script's StaticScope; carries the script's
# load path for __FILE__-style lookups.
class Script
  attr_accessor :path
end

# Run this CompiledMethod as a top-level script.
#
# Builds (or reuses) a Script marker, yields it to the caller for
# customization, wires it into a fresh Object-rooted StaticScope, then
# activates the method. The VM encloser path is saved/restored around the
# activation so nested loads do not leak scope.
def as_script(script=nil)
  script ||= CompiledMethod::Script.new
  yield script if block_given?
  Rubinius::VM.save_encloser_path
  # Setup the scoping.
  ss = StaticScope.new(Object)
  ss.script = script
  @staticscope = ss
  activate_as_script
  Rubinius::VM.restore_encloser_path
end
# Map an instruction pointer to its source line number.
#
# Each entry of @lines is a (low IP, high IP, line) triple; the first
# triple whose inclusive IP range covers +i+ wins. Unknown IPs map to 0.
def line_from_ip(i)
  @lines.each do |triple|
    low  = triple.at(0)
    high = triple.at(1)
    return triple.at(2) if i >= low && i <= high
  end
  0
end

# Returns the address (IP) of the first instruction in this CompiledMethod
# that is on the specified line, or the address of the first instruction on
# the next code line after the specified line if there are no instructions
# on the requested line. Returns -1 when no line at or after +line+ has
# generated code.
#
# This method only looks at instructions within the current CompiledMethod;
# see #locate_line for an alternate method that also searches inside the
# child CompiledMethods.
def first_ip_on_line(line)
  @lines.each do |triple|
    return triple.at(0) if triple.at(2) >= line
  end
  -1
end
# Replace the raw instruction sequence.
def bytecodes=(stream)
  @bytecodes = stream
end

# First source line that actually has generated code, or -1 when no entry
# in @lines carries a positive line number.
def first_line
  @lines.each do |entry|
    line = entry[2]
    return line if line > 0
  end
  -1
end
# True-ish when this method's name follows the internal naming scheme for
# block bodies (e.g. :__block__ or :__foo_block__).
#
# NOTE(review): returns the match position / nil (String#=~ semantics),
# not a strict boolean, and relies on @name responding to =~ — callers
# should only test it for truthiness.
def is_block?
  @name =~ /__(?:(?:\w|_)+)?block__/
end
# Convenience method to return an array of the child CompiledMethods from
# this CompiledMethod's literals.
def child_methods
  literals.select { |entry| entry.kind_of?(CompiledMethod) }
end

# Convenience method to return an array of the SendSites from
# this CompiledMethod's literals.
def send_sites
  literals.select { |entry| entry.kind_of?(SendSite) }
end
# Locates the CompiledMethod and instruction address (IP) of the first
# instruction on the specified line. This method recursively examines child
# compiled methods until an exact match for the searched line is found.
# It returns both the matching CompiledMethod and the IP of the first
# instruction on the requested line, or nil if no match for the specified line
# is found.
def locate_line(line, cm=self)
  cm.lines.each do |t|
    if (l = t.at(2)) == line
      # Found target line - return first IP
      return cm, t.at(0)
    elsif l > line
      # @lines is assumed sorted by line number — no later entry can match.
      break
    end
  end
  # Didn't find line in this CM, so check if a contained
  # CM encompasses the line searched for
  cm.child_methods.each do |child|
    if res = locate_line(line, child)
      return res
    end
  end
  # No child method is a match - fail
  return nil
end

##
# Decodes the instruction sequence that is represented by this compileed
# method. Delegates to InstructionSequence to do the instruction decoding,
# but then converts opcode literal arguments to their actual values by looking
# them up in the literals tuple.
# Takes an optional bytecodes argument representing the bytecode that is to
# be decoded using this CompiledMethod's locals and literals. This is provided
# for use by the debugger, where the bytecode sequence to be decoded may not
# exactly match the bytecode currently held by the CompiledMethod, typically
# as a result of substituting yield_debugger instructions into the bytecode.
def decode(bytecodes = @bytecodes)
  stream = bytecodes.decode(false)
  ip = 0
  args_reg = 0
  stream.map! do |inst|
    instruct = Instruction.new(inst, self, ip, args_reg)
    ip += instruct.size
    # Track the argument register so later instructions can resolve stack
    # effects that depend on the pending argument count.
    if instruct.opcode == :set_args
      args_reg = 0
    elsif instruct.opcode == :cast_array_for_args
      args_reg = instruct.args.first
    end
    instruct
  end
  # Add a convenience method to the array containing the decoded instructions
  # to convert an IP address to the index of the corresponding instruction
  def stream.ip_to_index(ip)
    if ip < 0 or ip > last.ip
      raise ArgumentError, "IP address is outside valid range of 0 to #{last.ip} (got #{ip})"
    end
    each_with_index do |inst, i|
      return i if ip <= inst.ip
    end
  end
  stream
end

##
# Calculates the minimum stack size required for this method.
#
# Returns two values:
# * The minimum size stack required
# * A flag indicating whether this is an exact size, or a minimum
#
# NOTE(review): produced operands are added before consumed ones are
# subtracted for each opcode — verify this matches the VM's push-then-pop
# accounting before relying on the exact figure.
def min_stack_size
  dc = decode
  high_mark = 0
  exact = true
  dc.inject(0) do |sz,op|
    i,flg = op.stack_produced
    sz += i
    exact &&= flg
    i,flg = op.stack_consumed
    sz -= i
    exact &&= flg
    high_mark = sz if sz > high_mark
    sz
  end
  return high_mark, exact
end
# Represents virtual machine's CPU instruction.
# Instructions are organized into instruction
# sequences known as iSeq, forming body
# of CompiledMethods.
#
# To generate VM optcodes documentation
# use rake doc:vm task.
class Instruction
  # Decode a single opcode tuple +inst+ ([opcode, *operands]) in the
  # context of CompiledMethod +cm+ at instruction pointer +ip+.
  # +args_reg+ is the current argument-register value, used when the
  # opcode's stack effect depends on the pending argument count.
  def initialize(inst, cm, ip, args_reg)
    @op = inst[0]
    @args = inst[1..-1]
    @args.each_index do |i|
      case @op.args[i]
      when :literal
        @args[i] = cm.literals[@args[i]]
      when :local
        # TODO: Blocks should be able to retrieve local names as well,
        # but need access to method corresponding to home context
        @args[i] = cm.local_names[args[i]] if cm.local_names and cm.name != :__block__
      when :block_local
        # TODO: Blocks should be able to retrieve enclosing block local names as well,
        # but need access to static scope
        @args[i] = cm.local_names[args[i]] if cm.local_names and args[0] == 0
      end
    end
    @ip = ip
    @line = cm.line_from_ip(ip)
    @stack_consumed = calculate_stack_usage(@op.stack_consumed, args_reg)
    @stack_produced = calculate_stack_usage(@op.stack_produced)
  end

  # Instruction pointer
  attr_reader :ip
  # Source line this instruction was generated from.
  attr_reader :line

  ##
  # Returns the OpCode object
  # Associated OptCode instance.
  def instruction
    @op
  end

  ##
  # Returns the symbol representing the opcode for this instruction.
  def opcode
    @op.opcode
  end

  ##
  # Returns an array of 0 to 2 arguments, depending on the opcode.
  def args
    @args
  end

  # Width of this instruction in the stream: opcode word plus operands.
  def size
    @args.size + 1
  end

  ##
  # Returns the stack operands consumed by this instruction, as well as a flag
  # indicating whether this is an exact value (true) or a minimum (false).
  def stack_consumed
    @stack_consumed
  end

  ##
  # Returns the stack operands produced by this instruction, as well as a flag
  # indicating whether this is an exact value (true) or a minimum (false).
  def stack_produced
    @stack_produced
  end

  ##
  # Calculate the stack usage (pushes or pops) of this instruction.
  #
  # Negative +code+ values are an encoded descriptor: -999 means "unknown",
  # otherwise the digits encode (multiplier, operand index, constant) and
  # the usage is derived from the instruction's own operands or the args
  # register. Returns [usage, exact?].
  def calculate_stack_usage(code, args_reg=0)
    usage = code
    exact = true
    if code < 0
      usage = 0
      if code == -999
        exact = false
      else
        # Stack usage depends on opcode args
        code *= -1
        mult, code = code.divmod(100)
        arg, code = code.divmod(10)
        if arg >= 1 and arg <= 2
          # Opcode consumes/produces a multiple of the value in the specified
          # opcode arg
          usage += mult * args[arg-1]
        elsif arg == 3
          # Opcode consumes number of args specified in args register
          usage += mult * args_reg
          exact = false
        end
        usage += code
      end
    end
    return usage, exact
  end

  # "NNNN: opcode           arg1, arg2" — fixed-width disassembly line.
  def to_s
    str = "%04d: %-27s" % [@ip, opcode]
    str << @args.map{|a| a.inspect}.join(', ')
  end
end
end
| 24.918519 | 96 | 0.634602 |
3340fc0f51aaf2041c2912a8161f30cee83dfa0f | 1,216 | lib = File.expand_path('../lib', __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require 'heartcheck/activerecord/version'
# Gem packaging metadata for the heartcheck-activerecord plugin.
Gem::Specification.new do |spec|
  spec.name = 'heartcheck-activerecord'
  spec.version = Heartcheck::Activerecord::VERSION
  spec.authors = ['Locaweb']
  spec.email = ['[email protected]']
  spec.summary = 'A activerecord checker'
  spec.description = 'Plugin to check activerecord connection in heartcheck.'
  spec.homepage = 'http://developer.locaweb.com.br'
  spec.license = 'MIT'
  # Ship only real files under lib/ (directories are filtered out).
  spec.files = Dir['lib/**/*'].select { |f| File.file?(f) }
  spec.executables = spec.files.grep(/^bin\//) { |f| File.basename(f) }
  spec.test_files = spec.files.grep(/^spec\//)
  spec.require_paths = ['lib']
  # NOTE(review): a net-telnet runtime dependency looks unrelated to an
  # ActiveRecord connection checker — confirm it is actually needed.
  spec.add_runtime_dependency 'net-telnet', '~> 0.1.1'
  spec.add_dependency 'activerecord', '>= 3.2', '< 7.0'
  spec.add_dependency 'heartcheck', '~> 2.0'
  spec.add_development_dependency 'pry-nav'
  spec.add_development_dependency 'redcarpet'
  spec.add_development_dependency 'rspec'
  spec.add_development_dependency 'rubocop'
  spec.add_development_dependency 'sqlite3'
  spec.add_development_dependency 'yard'
  spec.required_ruby_version = '>= 2.3'
end
| 35.764706 | 77 | 0.722862 |
4a14dfbec564349aebac4a29a6ba0ebcb38f07b4 | 6,491 | require 'spec_helper'
# Unit specs for Gitlab::Sherlock::Transaction: construction, timing,
# query/file-sample bookkeeping, and the ActiveSupport::Notifications
# subscriptions used to track queries and rendered views.
describe Gitlab::Sherlock::Transaction do
  let(:transaction) { described_class.new('POST', '/cat_pictures') }

  describe '#id' do
    it 'returns the transaction ID' do
      expect(transaction.id).to be_an_instance_of(String)
    end
  end

  describe '#type' do
    it 'returns the type' do
      expect(transaction.type).to eq('POST')
    end
  end

  describe '#path' do
    it 'returns the path' do
      expect(transaction.path).to eq('/cat_pictures')
    end
  end

  describe '#queries' do
    it 'returns an Array of queries' do
      expect(transaction.queries).to be_an_instance_of(Array)
    end
  end

  describe '#file_samples' do
    it 'returns an Array of file samples' do
      expect(transaction.file_samples).to be_an_instance_of(Array)
    end
  end

  describe '#started_at' do
    it 'returns the start time' do
      # Bypass real line profiling so run executes the block directly.
      allow(transaction).to receive(:profile_lines).and_yield
      transaction.run { 'cats are amazing' }
      expect(transaction.started_at).to be_an_instance_of(Time)
    end
  end

  describe '#finished_at' do
    it 'returns the completion time' do
      allow(transaction).to receive(:profile_lines).and_yield
      transaction.run { 'cats are amazing' }
      expect(transaction.finished_at).to be_an_instance_of(Time)
    end
  end

  describe '#view_counts' do
    it 'returns a Hash' do
      expect(transaction.view_counts).to be_an_instance_of(Hash)
    end
    it 'sets the default value of a key to 0' do
      expect(transaction.view_counts['cats.rb']).to be_zero
    end
  end

  describe '#run' do
    it 'runs the transaction' do
      allow(transaction).to receive(:profile_lines).and_yield
      retval = transaction.run { 'cats are amazing' }
      expect(retval).to eq('cats are amazing')
    end
  end

  describe '#duration' do
    it 'returns the duration in seconds' do
      start_time = Time.now
      allow(transaction).to receive(:started_at).and_return(start_time)
      allow(transaction).to receive(:finished_at).and_return(start_time + 5)
      expect(transaction.duration).to be_within(0.1).of(5.0)
    end
  end

  describe '#query_duration' do
    it 'returns the total query duration in seconds' do
      time = Time.now
      query1 = Gitlab::Sherlock::Query.new('SELECT 1', time, time + 5)
      query2 = Gitlab::Sherlock::Query.new('SELECT 2', time, time + 2)
      transaction.queries << query1
      transaction.queries << query2
      expect(transaction.query_duration).to be_within(0.1).of(7.0)
    end
  end

  describe '#to_param' do
    it 'returns the transaction ID' do
      expect(transaction.to_param).to eq(transaction.id)
    end
  end

  describe '#sorted_queries' do
    it 'returns the queries in descending order' do
      start_time = Time.now
      query1 = Gitlab::Sherlock::Query.new('SELECT 1', start_time, start_time)
      query2 = Gitlab::Sherlock::Query
        .new('SELECT 2', start_time, start_time + 5)
      transaction.queries << query1
      transaction.queries << query2
      expect(transaction.sorted_queries).to eq([query2, query1])
    end
  end

  describe '#sorted_file_samples' do
    it 'returns the file samples in descending order' do
      sample1 = Gitlab::Sherlock::FileSample.new(__FILE__, [], 10.0, 1)
      sample2 = Gitlab::Sherlock::FileSample.new(__FILE__, [], 15.0, 1)
      transaction.file_samples << sample1
      transaction.file_samples << sample2
      expect(transaction.sorted_file_samples).to eq([sample2, sample1])
    end
  end

  describe '#find_query' do
    it 'returns a Query when found' do
      query = Gitlab::Sherlock::Query.new('SELECT 1', Time.now, Time.now)
      transaction.queries << query
      expect(transaction.find_query(query.id)).to eq(query)
    end
    it 'returns nil when no query could be found' do
      expect(transaction.find_query('cats')).to be_nil
    end
  end

  describe '#find_file_sample' do
    it 'returns a FileSample when found' do
      sample = Gitlab::Sherlock::FileSample.new(__FILE__, [], 10.0, 1)
      transaction.file_samples << sample
      expect(transaction.find_file_sample(sample.id)).to eq(sample)
    end
    it 'returns nil when no file sample could be found' do
      expect(transaction.find_file_sample('cats')).to be_nil
    end
  end

  describe '#profile_lines' do
    describe 'when line profiling is enabled' do
      it 'yields the block using the line profiler' do
        allow(Gitlab::Sherlock).to receive(:enable_line_profiler?)
          .and_return(true)
        allow_any_instance_of(Gitlab::Sherlock::LineProfiler)
          .to receive(:profile).and_return('cats are amazing', [])
        retval = transaction.profile_lines { 'cats are amazing' }
        expect(retval).to eq('cats are amazing')
      end
    end
    describe 'when line profiling is disabled' do
      it 'yields the block' do
        allow(Gitlab::Sherlock).to receive(:enable_line_profiler?)
          .and_return(false)
        retval = transaction.profile_lines { 'cats are amazing' }
        expect(retval).to eq('cats are amazing')
      end
    end
  end

  describe '#subscribe_to_active_record' do
    let(:subscription) { transaction.subscribe_to_active_record }
    let(:time) { Time.now }
    let(:query_data) { { sql: 'SELECT 1', binds: [] } }
    after do
      # Always detach, otherwise the subscription leaks into other specs.
      ActiveSupport::Notifications.unsubscribe(subscription)
    end
    it 'tracks executed queries' do
      expect(transaction).to receive(:track_query)
        .with('SELECT 1', [], time, time)
      subscription.publish('test', time, time, nil, query_data)
    end
    it 'only tracks queries triggered from the transaction thread' do
      expect(transaction).not_to receive(:track_query)
      Thread.new { subscription.publish('test', time, time, nil, query_data) }
        .join
    end
  end

  describe '#subscribe_to_action_view' do
    let(:subscription) { transaction.subscribe_to_action_view }
    let(:time) { Time.now }
    let(:view_data) { { identifier: 'foo.rb' } }
    after do
      ActiveSupport::Notifications.unsubscribe(subscription)
    end
    it 'tracks rendered views' do
      expect(transaction).to receive(:track_view).with('foo.rb')
      subscription.publish('test', time, time, nil, view_data)
    end
    it 'only tracks views rendered from the transaction thread' do
      expect(transaction).not_to receive(:track_view)
      Thread.new { subscription.publish('test', time, time, nil, view_data) }
        .join
    end
  end
end
| 27.504237 | 78 | 0.671545 |
d53bc2030ea42078892d627a00c548b3ef46bbb3 | 382 | require 'rakuten_web_service/resource'
require 'rakuten_web_service/ichiba/item'
module RakutenWebService
  module Ichiba
    # Rakuten Ichiba shop resource.
    class Shop < Resource
      attribute :shopName, :shopCode, :shopUrl, :shopAffiliateUrl

      # Search items belonging to this shop; +options+ are forwarded to the
      # item search with this shop's code merged in.
      def items(options = {})
        params = options.merge(shop_code: code)
        RakutenWebService::Ichiba::Item.search(params)
      end
    end
  end
end
| 23.875 | 65 | 0.71466 |
# Homebrew formula for the Cased command-line client (Python virtualenv
# install with pinned PyPI resources).
class Cased < Formula
  include Language::Python::Virtualenv

  desc "Cased command-line client"
  homepage "https://github.com/cased/cli"
  url "https://files.pythonhosted.org/packages/84/03/4f13363146b1c0f85f2f0ee88ba8dd338436ee20fc5bdf2f0c51879695d5/cased-0.6.6.tar.gz"
  sha256 "9ef325a0789e0f36cb86f982c3e96d9ea900338a1cbad573e62ce73a23d05c79"

  depends_on "python3"

  resource "certifi" do
    url "https://files.pythonhosted.org/packages/06/a9/cd1fd8ee13f73a4d4f491ee219deeeae20afefa914dfb4c130cfc9dc397a/certifi-2020.12.5.tar.gz"
    sha256 "1a4995114262bffbc2413b159f2a1a480c969de6e6eb13ee966d470af86af59c"
  end

  resource "chardet" do
    url "https://files.pythonhosted.org/packages/ee/2d/9cdc2b527e127b4c9db64b86647d567985940ac3698eeabc7ffaccb4ea61/chardet-4.0.0.tar.gz"
    sha256 "0d6f53a15db4120f2b08c94f11e7d93d2c911ee118b6b30a04ec3ee8310179fa"
  end

  resource "gitdb" do
    url "https://files.pythonhosted.org/packages/34/fe/9265459642ab6e29afe734479f94385870e8702e7f892270ed6e52dd15bf/gitdb-4.0.7.tar.gz"
    sha256 "96bf5c08b157a666fec41129e6d327235284cca4c81e92109260f353ba138005"
  end

  resource "GitPython" do
    url "https://files.pythonhosted.org/packages/4a/8a/1519359949ce416eb059966c483fe340547a6fb5efb9f1dbcc0b33483146/GitPython-3.1.15.tar.gz"
    sha256 "05af150f47a5cca3f4b0af289b73aef8cf3c4fe2385015b06220cbcdee48bb6e"
  end

  resource "idna" do
    url "https://files.pythonhosted.org/packages/ea/b7/e0e3c1c467636186c39925827be42f16fee389dc404ac29e930e9136be70/idna-2.10.tar.gz"
    sha256 "b307872f855b18632ce0c21c5e45be78c0ea7ae4c15c828c20788b26921eb3f6"
  end

  resource "packaging" do
    url "https://files.pythonhosted.org/packages/86/3c/bcd09ec5df7123abcf695009221a52f90438d877a2f1499453c6938f5728/packaging-20.9.tar.gz"
    sha256 "5b327ac1320dc863dca72f4514ecc086f31186744b84a230374cc1fd776feae5"
  end

  resource "pyparsing" do
    url "https://files.pythonhosted.org/packages/2a/55/640c081b8b9d974665aa991be7b55ac75faa0e608a4125e03a96c253c1a1/pyparsing-3.0.0b2.tar.gz"
    sha256 "1c6409312ce2ce2997896af5756753778d5f1603666dba5587804f09ad82ed27"
  end

  resource "requests" do
    url "https://files.pythonhosted.org/packages/6b/47/c14abc08432ab22dc18b9892252efaf005ab44066de871e72a38d6af464b/requests-2.25.1.tar.gz"
    sha256 "27973dd4a904a4f13b263a19c866c13b92a39ed1c964655f025f3f8d3d75b804"
  end

  resource "smmap" do
    url "https://files.pythonhosted.org/packages/dd/d4/2b4f196171674109f0fbb3951b8beab06cd0453c1b247ec0c4556d06648d/smmap-4.0.0.tar.gz"
    sha256 "7e65386bd122d45405ddf795637b7f7d2b532e7e401d46bbe3fb49b9986d5182"
  end

  resource "typing-extensions" do
    url "https://files.pythonhosted.org/packages/16/06/0f7367eafb692f73158e5c5cbca1aec798cdf78be5167f6415dd4205fa32/typing_extensions-3.7.4.3.tar.gz"
    sha256 "99d4073b617d30288f569d3f13d2bd7548c3a7e4c8de87db09a9d29bb3a4a60c"
  end

  resource "urllib3" do
    url "https://files.pythonhosted.org/packages/cb/cf/871177f1fc795c6c10787bc0e1f27bb6cf7b81dbde399fd35860472cecbc/urllib3-1.26.4.tar.gz"
    sha256 "e7b021f7241115872f92f43c6508082facffbd1c048e3c6e2bb9c2a157e28937"
  end

  def install
    virtualenv_create(libexec, "python3")
    virtualenv_install_with_resources
  end

  test do
    # FIX: the original used `system `cased version``, which executes the
    # command via backticks at test time and then passes its *output* to
    # system. Invoke the installed binary directly instead.
    system bin/"cased", "version"
  end
end
| 43.573333 | 149 | 0.824663 |
01521a1b7db52882ef9580868973553ae1ddc3ea | 125 | require "oyaji/version"
require "rest-client"
require "json"
require "hashie"
require "oyaji/helper"
require "oyaji/client"
| 15.625 | 23 | 0.768 |
7a52edc53093cf966ad0af0013e3b7c441c1b7d7 | 6,440 | module Rubinius
module Stats
module Units
# Time unit factors, where one second is
# 10^9 nanoseconds (ns)
# 10^6 microseconds (us)
# 10^3 milliseconds (ms)
SEC_PER_NS = 1.0e-9
MSEC_PER_NS = 1.0e-6
USEC_PER_NS = 1.0e-3
KBYTES = 1024
def usec(ns)
ns * USEC_PER_NS
end
def msec(ns)
ns * MSEC_PER_NS
end
def sec(ns)
ns * SEC_PER_NS
end
def auto_time(time)
return "0" if time == 0
elapsed = sec time
return "%.2fs" % elapsed if elapsed > 1.0
elapsed = msec time
return "%.2fm" % elapsed if elapsed > 1.0
elapsed = usec time
return "%.2fu" % elapsed if elapsed > 1.0
"%dn" % time
end
# Thanks Ruby Quiz #113
def comma(number, digits=2)
if number.is_a? Float
str = "%.#{digits}f" % number
else
str = number.to_s
end
str.reverse.scan(/(?:\d*\.)?\d{1,3}-?/).join(',').reverse
end
def percentage(part, whole, units=:sec)
case units
when :sec
part = sec part
whole = sec whole
when :msec
part = msec part
whole = msec whole
end
"%.1f%%" % (part * 100.0 / whole)
end
def auto_bytes(bytes)
return bytes.to_s if bytes < KBYTES
bytes /= KBYTES.to_f
return "%.1fK" % bytes if bytes < KBYTES
"%.1fM" % (bytes / KBYTES)
end
end
# Column-oriented table printing for the stats reports. columns/headings
# must be called before heading/value/statistics, since they build the
# printf format strings used by the other helpers.
module Printer
  # Define column widths; the first width is the label column, the rest are
  # numeric columns. Builds @value_format (label + single value) and
  # @stats_format / @header_format (label + one cell per column).
  def columns(*columns)
    @width = columns.inject(0) { |sum, x| sum + x }
    text_column = "%%-%ds" % columns.shift
    @value_format = "#{text_column}%#{columns[1]}s\n"
    format = columns.map { |c| "%%%ds" % c }.join
    @stats_format = "#{text_column}#{format}\n"
    @header_format = "\n" << @stats_format
  end

  # Store the column heading labels for later use by #heading.
  def headings(*headings)
    @headings = headings
  end

  # Print the heading row (leader + stored headings) and a separator.
  def heading(leader)
    printf @header_format, leader, *@headings
    separator
  end

  # Print a dashed rule; +limit+ overrides the full table width.
  def separator(limit=nil)
    width = limit || @width
    puts "-" * width
  end

  # Print a label with a preformatted string value.
  def string(metric, string)
    printf @value_format, metric, string
  end

  # Print a label with a value formatted through +meth+ (a Units helper).
  def value(metric, value, meth=:comma)
    printf @value_format, metric, send(meth, value)
  end

  # Print a label with the total/max/min/average cells of a stats hash,
  # each formatted through +meth+.
  def statistics(metric, data, meth=:comma)
    values = [data[:total], data[:max], data[:min], data[:average]]
    printf @stats_format, metric, *values.map { |d| send meth, d }
  end
end
# Reporting front-end for the VM's garbage-collector statistics.
# #clear/#info are VM primitives; #show renders the report to stdout using
# the Units/Printer helpers.
class GC
  include Units
  include Printer

  # Reset the collected GC statistics (VM primitive).
  def clear
    Ruby.primitive :vm_stats_gc_clear
    raise PrimitiveFailure, "Rubinius::Stats::GC.clear primitive failed"
  end

  # Fetch the GC statistics hash from the VM, or nil when the VM was not
  # built with stats support (VM primitive).
  def info
    Ruby.primitive :vm_stats_gc_info
    raise PrimitiveFailure, "Rubinius::Stats::GC.info primitive failed"
  end

  # Print a full young/mature collection and allocation report.
  def show
    unless data = info
      puts "\nNo GC stats information available. Build with 'rake build:stats'"
      return
    end
    columns 25, 14, 14, 14, 14
    headings "total", "max", "min", "average"
    collect_young = data[:collect_young]
    allocate_young = data[:allocate_young]
    collect_mature = data[:collect_mature]
    allocate_mature = data[:allocate_mature]
    total = allocate_young[:total] + collect_young[:total] +
      allocate_mature[:total] + collect_mature[:total]
    puts "\nGarbage collector stats:"
    # TODO: make all configured values, even defaults, visible in config
    young = "Young (%d)" % (Rubinius::RUBY_CONFIG["rbx.gc.lifetime"] || 6)
    heading young
    value "Collections", collect_young[:timings]
    statistics " times", collect_young, :auto_time
    statistics " objects promoted", collect_young[:objects_promoted]
    statistics " objects copied", collect_young[:objects_copied]
    statistics " bytes copied", collect_young[:bytes_copied], :auto_bytes
    puts "Lifetimes"
    collect_young[:lifetimes].each_with_index do |lifetime, index|
      value " #{index}", lifetime
    end
    string "% of GC time", "(#{percentage(collect_young[:total], total)})"
    separator 2
    value "Allocations", allocate_young[:timings]
    statistics " times", allocate_young, :auto_time
    value " bytes allocated", allocate_young[:bytes_allocated], :auto_bytes
    object_types allocate_young[:object_types]
    string "% of GC time", "(#{percentage(allocate_young[:total], total)})"
    heading "Mature"
    value "Collections", collect_mature[:timings]
    statistics " times", collect_mature, :auto_time
    string "% of GC time", "(#{percentage(collect_mature[:total], total)})"
    separator 2
    value "Allocations", allocate_mature[:timings]
    statistics " times", allocate_mature, :auto_time
    value " chunks added", allocate_mature[:chunks_added]
    value " large objects", allocate_mature[:large_objects]
    object_types allocate_mature[:object_types]
    string "% of GC time", "(#{percentage(allocate_mature[:total], total)})"
    printf "\nTotal time spent in GC: %s (%s)\n\n",
      auto_time(total), percentage(total, data[:clock])
  end

  # Print the per-type allocation counts; silently skipped unless enabled
  # via the rbx.gc_stats.object_types config flag.
  def object_types(data)
    return if data.empty? || !Rubinius::RUBY_CONFIG["rbx.gc_stats.object_types"]
    total = 0
    puts "Object types"
    data.each_with_index do |count, type|
      next if count == 0
      value " #{object_type type}", count
      total += count
    end
    value " Total types", total
  end
  private :object_types

  # Map a numeric type id to its name by scraping the generated VM header;
  # the table is parsed once and memoized.
  def object_type(type)
    unless @types
      # TODO: make an interface to these available, potentially as part of
      # making the compiler aware of these types.
      object_types = File.dirname(__FILE__) + '/../../vm/gen/object_types.hpp'
      # FIX: File.exists? was removed in Ruby 3.2; File.exist? is the
      # long-standing canonical name.
      if File.exist? object_types
        @types = IO.read(object_types).scan(/\b(\w*)Type/).flatten
      else
        @types = []
      end
    end
    @types[type]
  end
  private :object_type
end
end
end
| 28.75 | 89 | 0.562267 |
# Scrolling foreground layer for the parallax scene.
class Foreground
  def initialize
    # NOTE(review): assumes ROOT_PATH, WIDTH and ZOrder are supplied by the
    # including game script — confirm against the main file.
    @image = Gosu::Image.new("#{ROOT_PATH}/assets/images/foreground.png")
    @x = 0
  end

  # Scroll left at half the supplied speed, snapping back to the origin
  # once the layer has moved past its wrap point.
  def move(speed)
    @x -= speed * 0.5
    @x = 0 if @x < WIDTH - @image.width
  end

  def draw
    @image.draw(@x, 0, ZOrder::FOREGROUND)
  end
end
| 15 | 71 | 0.62963 |
module Matestack::Ui::Core::S
  # Renders the HTML <s> (strikethrough) element; all behaviour comes from
  # the Static component base class.
  class S < Matestack::Ui::Core::Component::Static
  end
end
| 16 | 50 | 0.666667 |
module UsersHelper
  # Returns the Gravatar image tag for the given user at the requested
  # pixel size (default 80).
  def gravatar_for(user, size: 80)
    digest = Digest::MD5.hexdigest(user.email.downcase)
    url = "https://secure.gravatar.com/avatar/#{digest}?s=#{size}"
    image_tag(url, alt: user.name, class: "gravatar")
  end
end
bf822114d74f825147291440821167dcacaf6248 | 1,915 | # -*- encoding: utf-8 -*-
# stub: faraday 1.3.0 ruby lib spec/external_adapters
# Auto-generated RubyGems stub spec for faraday 1.3.0 (see the "# stub:"
# banner above). NOTE(review): generated file — regenerate instead of
# editing by hand; the respond_to? guards keep it loadable on very old
# RubyGems versions.
Gem::Specification.new do |s|
  s.name = "faraday".freeze
  s.version = "1.3.0"

  s.required_rubygems_version = Gem::Requirement.new(">= 0".freeze) if s.respond_to? :required_rubygems_version=
  s.metadata = { "bug_tracker_uri" => "https://github.com/lostisland/faraday/issues", "changelog_uri" => "https://github.com/lostisland/faraday/releases/tag/v1.3.0", "homepage_uri" => "https://lostisland.github.io/faraday", "source_code_uri" => "https://github.com/lostisland/faraday" } if s.respond_to? :metadata=
  s.require_paths = ["lib".freeze, "spec/external_adapters".freeze]
  s.authors = ["@technoweenie".freeze, "@iMacTia".freeze, "@olleolleolle".freeze]
  s.date = "2020-12-31"
  s.email = "[email protected]".freeze
  s.homepage = "https://lostisland.github.io/faraday".freeze
  s.licenses = ["MIT".freeze]
  s.required_ruby_version = Gem::Requirement.new(">= 2.4".freeze)
  s.rubygems_version = "2.7.6".freeze
  s.summary = "HTTP/REST API client library.".freeze
  s.installed_by_version = "2.7.6" if s.respond_to? :installed_by_version

  # Dependency declaration path varies with the RubyGems spec version.
  if s.respond_to? :specification_version then
    s.specification_version = 4
    if Gem::Version.new(Gem::VERSION) >= Gem::Version.new('1.2.0') then
      s.add_runtime_dependency(%q<faraday-net_http>.freeze, ["~> 1.0"])
      s.add_runtime_dependency(%q<multipart-post>.freeze, ["< 3", ">= 1.2"])
      s.add_runtime_dependency(%q<ruby2_keywords>.freeze, [">= 0"])
    else
      s.add_dependency(%q<faraday-net_http>.freeze, ["~> 1.0"])
      s.add_dependency(%q<multipart-post>.freeze, ["< 3", ">= 1.2"])
      s.add_dependency(%q<ruby2_keywords>.freeze, [">= 0"])
    end
  else
    s.add_dependency(%q<faraday-net_http>.freeze, ["~> 1.0"])
    s.add_dependency(%q<multipart-post>.freeze, ["< 3", ">= 1.2"])
    s.add_dependency(%q<ruby2_keywords>.freeze, [">= 0"])
  end
end
| 47.875 | 314 | 0.675718 |
f7c911b57458727989824c2bd196de949e6f024b | 1,986 | require 'fog/core/collection'
require 'fog/hp/models/meta_parent'
require 'fog/hp/models/compute/meta'
require 'fog/hp/models/compute/image'
require 'fog/hp/models/compute/server'
module Fog
  module Compute
    class HP
      # Collection of key/value metadata entries attached to a parent HP
      # Compute resource (server or image). Requires a parent before any
      # API call; the parent's collection_name comes from MetaParent.
      class Metadata < Fog::Collection
        model Fog::Compute::HP::Meta

        include Fog::Compute::HP::MetaParent

        # Load every metadata pair for the parent as Meta models. Returns
        # nil (without calling the API) when the parent has no id yet.
        def all
          requires :parent
          if @parent.id
            metadata = service.list_metadata(collection_name, @parent.id).body['metadata']
            metas = []
            metadata.each_pair {|k,v| metas << {"key" => k, "value" => v} }
            load(metas)
          end
        end

        # Delete one metadata key; a missing key is treated as success
        # (returns nil).
        def destroy(key)
          requires :parent
          service.delete_meta(collection_name, @parent.id, key)
        rescue Fog::Compute::HP::NotFound
          nil
        end

        # Fetch one metadata key as a Meta model, or nil when absent.
        def get(key)
          requires :parent
          data = service.get_meta(collection_name, @parent.id, key).body["meta"]
          metas = []
          data.each_pair {|k,v| metas << {"key" => k, "value" => v} }
          new(metas[0])
        rescue Fog::Compute::HP::NotFound
          nil
        end

        # Build a Meta model bound to this collection's parent.
        def new(attributes = {})
          requires :parent
          super({ :parent => @parent }.merge!(attributes))
        end

        # Replace the parent's metadata with +data+ (or, when nil, with the
        # pairs currently held in this collection).
        def set(data=nil)
          requires :parent
          service.set_metadata(collection_name, @parent.id, meta_hash(data))
        end

        # Merge +data+ (or the collection's current pairs) into the
        # parent's metadata.
        def update(data=nil)
          requires :parent
          service.update_metadata(collection_name, @parent.id, meta_hash(data))
        end

        private

        # Normalize to a plain key=>value Hash; when +data+ is nil, fold in
        # the collection's entries, which may be Meta models or raw hashes.
        def meta_hash(data=nil)
          if data.nil?
            data={}
            self.each do |meta|
              if meta.is_a?(Fog::Compute::HP::Meta) then
                data.store(meta.key, meta.value)
              else
                data.store(meta["key"], meta["value"])
              end
            end
          end
          data
        end
      end
    end
  end
end
| 24.825 | 90 | 0.531722 |
21606e0c7b8b79559592f95963bc9d4f051f4662 | 1,184 | # frozen_string_literal: true
require 'spec_helper'
require 'bolt/executor'
require 'bolt/target'
describe 'add_facts' do
include PuppetlabsSpec::Fixtures
let(:executor) { Bolt::Executor.new }
let(:inventory) { mock('inventory') }
let(:target) { Bolt::Target.new('example') }
around(:each) do |example|
Puppet[:tasks] = true
Puppet.features.stubs(:bolt?).returns(true)
Puppet.override(bolt_executor: executor, bolt_inventory: inventory) do
example.run
end
end
it 'should set a fact on a target' do
data = { 'a' => 'b', 'c' => 'd' }
inventory.expects(:add_facts).with(target, data).returns(data)
is_expected.to run.with_params(target, data).and_return(data)
end
it 'errors when passed invalid data types' do
is_expected.to run.with_params(target, 1)
.and_raise_error(ArgumentError,
"'add_facts' parameter 'facts' expects a Hash value, got Integer")
end
it 'reports the call to analytics' do
executor.expects(:report_function_call).with('add_facts')
inventory.expects(:add_facts).returns({})
is_expected.to run.with_params(target, {})
end
end
| 28.878049 | 105 | 0.666385 |
03aaa911728c6d220712a399ed13c953e07e9f4a | 1,225 | # frozen_string_literal: true
# Schema for the delayed_job work queue table.
class CreateDelayedJobs < ActiveRecord::Migration[6.1]
  def self.up
    create_table :delayed_jobs do |t|
      t.integer :priority, default: 0, null: false # lets some jobs jump the queue
      t.integer :attempts, default: 0, null: false # retries before giving up for good
      t.text :handler, null: false                 # YAML-encoded object that performs the work
      t.text :last_error                           # reason for the last failure
      t.datetime :run_at                           # when to run (now, or a future time)
      t.datetime :locked_at                        # set while a worker holds this job
      t.datetime :failed_at                        # set once all retries are exhausted
      t.string :locked_by                          # identity of the locking worker
      t.string :queue                              # named queue this job belongs to
      t.timestamps null: true
    end

    add_index :delayed_jobs, %i[priority run_at], name: 'delayed_jobs_priority'
  end

  def self.down
    drop_table :delayed_jobs
  end
end
| 36.029412 | 94 | 0.682449 |
7a2368ceefc9f9144cad332dfc0d8e095fb2d5fd | 103,927 | require "rexml/document"
class MiqAeClassController < ApplicationController
include MiqAeClassHelper
include AutomateTreeHelper
include Mixins::GenericSessionMixin
include Mixins::BreadcrumbsMixin
before_action :check_privileges
before_action :get_session_data
after_action :cleanup_action
after_action :set_session_data
MIQ_AE_COPY_ACTIONS = %w[miq_ae_class_copy miq_ae_instance_copy miq_ae_method_copy].freeze
# GET /automation_classes
# GET /automation_classes.xml
def index
redirect_to(:action => 'explorer')
end
def change_tab
# resetting flash array so messages don't get displayed when tab is changed
@flash_array = []
@explorer = true
@record = @ae_class = MiqAeClass.find(x_node.split('-').last)
@sb[:active_tab] = params[:tab_id]
render :update do |page|
page << javascript_prologue
page.replace("flash_msg_div", :partial => "layouts/flash_msg")
page << javascript_reload_toolbars
page << "miqSparkle(false);"
end
end
AE_X_BUTTON_ALLOWED_ACTIONS = {
'instance_fields_edit' => :edit_instance,
'method_inputs_edit' => :edit_mehod,
'miq_ae_class_copy' => :copy_objects,
'miq_ae_class_edit' => :edit_class,
'miq_ae_class_delete' => :deleteclasses,
'miq_ae_class_new' => :new,
'miq_ae_domain_delete' => :delete_domain,
'miq_ae_domain_edit' => :edit_domain,
'miq_ae_domain_lock' => :domain_lock,
'miq_ae_domain_unlock' => :domain_unlock,
'miq_ae_git_refresh' => :git_refresh,
'miq_ae_domain_new' => :new_domain,
'miq_ae_domain_priority_edit' => :domains_priority_edit,
'miq_ae_field_edit' => :edit_fields,
'miq_ae_field_seq' => :fields_seq_edit,
'miq_ae_instance_copy' => :copy_objects,
'miq_ae_instance_delete' => :deleteinstances,
'miq_ae_instance_edit' => :edit_instance,
'miq_ae_instance_new' => :new_instance,
'miq_ae_item_edit' => :edit_item,
'miq_ae_method_copy' => :copy_objects,
'miq_ae_method_delete' => :deletemethods,
'miq_ae_method_edit' => :edit_method,
'miq_ae_method_new' => :new_method,
'miq_ae_namespace_delete' => :delete_ns,
'miq_ae_namespace_edit' => :edit_ns,
'miq_ae_namespace_new' => :new_ns,
}.freeze
def x_button
generic_x_button(AE_X_BUTTON_ALLOWED_ACTIONS)
end
def explorer
@trees = []
@sb[:action] = nil
@explorer = true
# don't need right bottom cell
@breadcrumbs = []
bc_name = _("Explorer")
bc_name += _(" (filtered)") if @filters && (@filters[:tags].present? || @filters[:cats].present?)
drop_breadcrumb(:name => bc_name, :url => "/miq_ae_class/explorer")
@lastaction = "replace_right_cell"
build_accordions_and_trees
@right_cell_text ||= _("Datastore")
render :layout => "application"
end
# Display any Automate Domain through Tenant's textual summary
def show
@sb[:action] = nil
@explorer = true
build_accordions_and_trees
self.x_node = "aen-#{params[:id]}"
get_node_info(x_node)
render :layout => 'application'
end
def set_right_cell_text(id, rec = nil)
nodes = id.split('-')
case nodes[0]
when "root"
txt = _("Datastore")
@sb[:namespace_path] = ""
when "aec"
txt = _('Automate Class')
@sb[:namespace_path] = rec.fqname
when "aei"
txt = _('Automate Instance')
updated_by = rec.updated_by ? _(" by %{user}") % {:user => rec.updated_by} : ""
@sb[:namespace_path] = rec.fqname
@right_cell_text = _("%{model} [%{name} - Updated %{time}%{update}]") % {
:model => txt,
:name => get_rec_name(rec),
:time => format_timezone(rec.updated_on, Time.zone, "gtl"),
:update => updated_by
}
when "aem"
txt = _('Automate Method')
updated_by = rec.updated_by ? _(" by %{user}") % {:user => rec.updated_by} : ""
@sb[:namespace_path] = rec.fqname
@right_cell_text = _("%{model} [%{name} - Updated %{time}%{update}]") % {
:model => txt,
:name => get_rec_name(rec),
:time => format_timezone(rec.updated_on, Time.zone, "gtl"),
:update => updated_by
}
when "aen"
txt = rec.domain? ? _('Automate Domain') : _('Automate Namespace')
@sb[:namespace_path] = rec.fqname
end
@sb[:namespace_path]&.gsub!(%r{\/}, " / ")
@right_cell_text = "#{txt} #{_("\"%s\"") % get_rec_name(rec)}" unless %w[root aei aem].include?(nodes[0])
end
def expand_toggle
render :update do |page|
page << javascript_prologue
if @sb[:squash_state]
@sb[:squash_state] = false
page << javascript_show("inputs_div")
page << "$('#exp_collapse_img i').attr('class','fa fa-angle-up fa-lg')"
page << "$('#exp_collapse_img').prop('title', 'Hide Input Parameters');"
page << "$('#exp_collapse_img').prop('alt', 'Hide Input Parameters');"
else
@sb[:squash_state] = true
page << javascript_hide("inputs_div")
page << "$('#exp_collapse_img i').attr('class','fa fa-angle-down fa-lg')"
page << "$('#exp_collapse_img').prop('title', 'Show Input Parameters');"
page << "$('#exp_collapse_img').prop('alt', 'Show Input Parameters');"
end
end
end
def get_namespace_node_info(node_id)
@record = MiqAeNamespace.find(node_id)
# need to set record as Domain record if it's a domain, editable_domains, enabled_domains,
# visible domains methods returns list of Domains, need this for toolbars to hide/disable correct records.
@record = MiqAeDomain.find(node_id) if @record.domain?
@version_message = domain_version_message(@record) if @record.domain?
if @record.nil?
set_root_node
else
@records = []
# Add Namespaces under a namespace
details = @record.ae_namespaces
@records += details.sort_by { |d| [d.display_name.to_s, d.name.to_s] }
# Add classes under a namespace
details_cls = @record.ae_classes
unless details_cls.nil?
@records += details_cls.sort_by { |d| [d.display_name.to_s, d.name.to_s] }
end
@combo_xml = build_type_options
@dtype_combo_xml = build_dtype_options
@sb[:active_tab] = "details"
set_right_cell_text(x_node, @record)
end
end
def get_root_node_info
@grid_data = User.current_tenant.visible_domains
add_all_domains_version_message(@grid_data)
@record = nil
@right_cell_text = _("Datastore")
@sb[:active_tab] = "namespaces"
set_right_cell_text(x_node)
end
def get_node_info(node, _show_list = true)
node_type, node_id = valid_active_node(node).split('-')
@sb[:row_selected] = nil if params[:action] == "tree_select"
case node_type
when 'aec' then get_class_node_info(node_id)
when 'aei' then get_instance_node_info(node_id)
when 'aem' then get_method_node_info(node_id)
when 'aen' then get_namespace_node_info(node_id)
else get_root_node_info
end
end
def domain_version_message(domain)
version = domain.version
available_version = domain.available_version
return if version.nil? || available_version.nil?
if version != available_version
_("%{name} domain: Current version - %{version}, Available version - %{available_version}") %
{:name => domain.name, :version => version, :available_version => available_version}
end
end
def add_all_domains_version_message(domains)
@version_messages = domains.collect { |dom| domain_version_message(dom) }.compact
end
# Tree node selected in explorer
def tree_select
@explorer = true
@lastaction = "explorer"
self.x_active_tree = params[:tree] if params[:tree]
self.x_node = params[:id]
@sb[:action] = nil
replace_right_cell
end
# Check for parent nodes missing from ae tree and return them if any
def open_parent_nodes(record)
nodes = record.fqname.split("/")
parents = []
nodes.each_with_index do |_, i|
if i == nodes.length - 1
selected_node = x_node.split("-")
parents.push(record.ae_class) if %w[aei aem].include?(selected_node[0])
self.x_node = TreeBuilder.build_node_id(record)
parents.push(record)
else
ns = MiqAeNamespace.lookup_by_fqname(nodes[0..i].join("/"))
parents.push(ns) if ns
end
end
build_and_add_nodes(parents)
end
def build_and_add_nodes(parents)
existing_node = find_existing_node(parents)
return nil if existing_node.nil?
children = tree_add_child_nodes(existing_node)
# set x_node after building tree nodes so parent node of new nodes can be selected in the tree.
unless params[:action] == "x_show"
self.x_node = if @record.kind_of?(MiqAeClass)
"aen-#{@record.namespace_id}"
else
"aec-#{@record.class_id}"
end
end
{:key => existing_node, :nodes => children}
end
def find_existing_node(parents)
existing_node = nil
# Go up thru the parents and find the highest level unopened, mark all as opened along the way
unless parents.empty? || # Skip if no parents or parent already open
x_tree[:open_nodes].include?(x_build_node_id(parents.last))
parents.reverse_each do |p|
p_node = x_build_node_id(p)
if x_tree[:open_nodes].include?(p_node)
return p_node
else
x_tree[:open_nodes].push(p_node)
existing_node = p_node
end
end
end
existing_node
end
def replace_right_cell(options = {})
@explorer = true
replace_trees = options[:replace_trees]
# FIXME: is the following line needed?
# replace_trees = @replace_trees if @replace_trees #get_node_info might set this
nodes = x_node.split('-')
@in_a_form = @in_a_form_fields = @in_a_form_props = false if params[:button] == "cancel" ||
(%w[save add].include?(params[:button]) && replace_trees)
add_nodes = open_parent_nodes(@record) if params[:button] == "copy" ||
params[:action] == "x_show"
get_node_info(x_node) if !@in_a_form && !@angular_form && @button != "reset"
c_tb = build_toolbar(center_toolbar_filename) unless @in_a_form
presenter = ExplorerPresenter.new(
:active_tree => x_active_tree,
:right_cell_text => @right_cell_text,
:remove_nodes => add_nodes, # remove any existing nodes before adding child nodes to avoid duplication
:add_nodes => add_nodes
)
trees = build_replaced_trees(replace_trees, %i[ae])
reload_trees_by_presenter(presenter, trees)
if @sb[:action] == "miq_ae_field_seq"
presenter.update(:class_fields_div, r[:partial => "fields_seq_form"])
elsif @sb[:action] == "miq_ae_domain_priority_edit"
presenter.update(:ns_list_div, r[:partial => "domains_priority_form"])
elsif MIQ_AE_COPY_ACTIONS.include?(@sb[:action])
presenter.update(:main_div, r[:partial => "copy_objects_form"])
else
if @sb[:action] == "miq_ae_class_edit"
@sb[:active_tab] = 'props'
else
@sb[:active_tab] ||= 'instances'
end
presenter.update(:main_div, r[:partial => 'all_tabs'])
end
presenter.replace('flash_msg_div', r[:partial => "layouts/flash_msg"]) if @flash_array
if @in_a_form && !@angular_form
action_url = create_action_url(nodes.first)
# incase it was hidden for summary screen, and incase there were no records on show_list
presenter.show(:paging_div, :form_buttons_div)
presenter.update(:form_buttons_div, r[
:partial => "layouts/x_edit_buttons",
:locals => {
:record_id => @edit[:rec_id],
:action_url => action_url,
:copy_button => action_url == "copy_objects",
:multi_record => @sb[:action] == "miq_ae_domain_priority_edit",
:serialize => @sb[:active_tab] == 'methods',
}
])
else
# incase it was hidden for summary screen, and incase there were no records on show_list
presenter.hide(:paging_div, :form_buttons_div)
end
presenter[:lock_sidebar] = @in_a_form && @edit
if @record.kind_of?(MiqAeMethod) && !@in_a_form && !@angular_form
presenter.set_visibility(@record.inputs.present?, :params_div)
end
presenter[:clear_gtl_list_grid] = @gtl_type && @gtl_type != 'list'
# Rebuild the toolbars
if c_tb.present?
presenter.show(:toolbar)
presenter.reload_toolbars(:center => c_tb)
else
presenter.hide(:toolbar)
end
presenter[:record_id] = determine_record_id_for_presenter
presenter[:osf_node] = x_node
presenter.show_miq_buttons if @changed
presenter.update(:breadcrumbs, r[:partial => 'layouts/breadcrumbs'])
render :json => presenter.for_render
end
def build_type_options
MiqAeField.available_aetypes.collect { |t| [t.titleize, t, {"data-icon" => ae_field_fonticon(t)}] }
end
def build_dtype_options
MiqAeField.available_datatypes_for_ui.collect { |t| [t.titleize, t, {"data-icon" => ae_field_fonticon(t)}] }
end
def class_and_glyph(cls)
case cls.to_s.split("::").last
when "MiqAeClass"
cls = "aec"
glyphicon = "ff ff-class"
when "MiqAeNamespace"
cls = "aen"
glyphicon = "pficon pficon-folder-open"
when "MiqAeInstance"
cls = "aei"
glyphicon = "fa fa-file-text-o"
when "MiqAeField"
cls = "Field"
glyphicon = "ff ff-field"
when "MiqAeMethod"
cls = "aem"
glyphicon = "ff ff-method"
end
[cls, glyphicon]
end
  # Builds the XML document backing the legacy details grid: one row per
  # child record with a checkbox, a type glyph, and an escaped display name.
  #
  # view - collection of child records (namespaces/classes/etc).
  # mode - when true, sort records by display_name/name; when false the
  #        caller has already ordered them (namespaces before classes).
  #
  # NOTE(review): `REXML::Document.load` does not appear in the REXML API
  # (Document.new is the usual constructor) — this line looks like it was
  # mangled in transit; confirm against upstream source.
  def build_details_grid(view, mode = true)
    xml = REXML::Document.load("")
    xml << REXML::XMLDecl.new(1.0, "UTF-8")
    # Create root element
    root = xml.add_element("rows")
    # Build the header row
    head = root.add_element("head")
    header = ""
    head.add_element("column", "type" => "ch", "width" => 25, "align" => "center") # Checkbox column
    new_column = head.add_element("column", "width" => "30", "align" => "left", "sort" => "na")
    new_column.add_attribute("type", 'ro')
    new_column.text = header
    new_column = head.add_element("column", "width" => "*", "align" => "left", "sort" => "na")
    new_column.add_attribute("type", 'ro')
    new_column.text = header
    # passing in mode, don't need to sort records for namaspace node, it will be passed in sorted order, need to show Namesaces first and then Classes
    records =
      if mode
        view.sort_by { |v| [v.display_name.to_s, v.name.to_s] }
      else
        view
      end
    records.each do |kids|
      cls, glyphicon = class_and_glyph(kids.class)
      rec_name = get_rec_name(kids)
      # Escape the name for safe embedding in the grid XML/markup.
      if rec_name
        rec_name = rec_name.gsub(/\n/, "\\n")
        rec_name = rec_name.gsub(/\t/, "\\t")
        rec_name = rec_name.tr('"', "'")
        rec_name = ERB::Util.html_escape(rec_name)
        # NOTE(review): the replacement string below looks garbled (likely an
        # HTML entity lost in extraction) — verify against upstream source.
        rec_name = rec_name.gsub(/\\/, "\")
      end
      srow = root.add_element("row", "id" => "#{cls}-#{kids.id}", "style" => "border-bottom: 1px solid #CCCCCC;color:black; text-align: center")
      srow.add_element("cell").text = "0" # Checkbox column unchecked
      srow.add_element("cell", "image" => "blank.png", "title" => cls.to_s, "style" => "border-bottom: 1px solid #CCCCCC;text-align: left;height:28px;").text = REXML::CData.new("<i class='#{glyphicon}' alt='#{cls}' title='#{cls}'></i>")
      srow.add_element("cell", "image" => "blank.png", "title" => rec_name.to_s, "style" => "border-bottom: 1px solid #CCCCCC;text-align: left;height:28px;").text = rec_name
    end
    xml.to_s
  end
def edit_item
item = find_checked_items
@sb[:row_selected] = item[0]
if @sb[:row_selected].split('-')[0] == "aec"
edit_class
else
edit_ns
end
end
def edit_class
assert_privileges("miq_ae_class_edit")
if params[:pressed] == "miq_ae_item_edit" # came from Namespace details screen
id = @sb[:row_selected].split('-')
@ae_class = find_record_with_rbac(MiqAeClass, id[1])
else
@ae_class = find_record_with_rbac(MiqAeClass, params[:id])
end
set_form_vars
# have to get name and set node info, to load multiple tabs correctly
# rec_name = get_rec_name(@ae_class)
# get_node_info("aec-#{@ae_class.id}")
@in_a_form = true
@in_a_form_props = true
session[:changed] = @changed = false
replace_right_cell
end
def edit_fields
assert_privileges("miq_ae_field_edit")
if params[:pressed] == "miq_ae_item_edit" # came from Namespace details screen
id = @sb[:row_selected].split('-')
@ae_class = find_record_with_rbac(MiqAeClass, id[1])
else
@ae_class = find_record_with_rbac(MiqAeClass, params[:id])
end
fields_set_form_vars
@in_a_form = true
@in_a_form_fields = true
session[:changed] = @changed = false
replace_right_cell
end
def edit_domain
assert_privileges("miq_ae_domain_edit")
edit_domain_or_namespace
end
def edit_ns
assert_privileges("miq_ae_namespace_edit")
@angular_form = true
edit_domain_or_namespace
end
def edit_instance
assert_privileges("miq_ae_instance_edit")
obj = find_checked_items
if obj.present?
@sb[:row_selected] = obj[0]
id = @sb[:row_selected].split('-')
else
id = x_node.split('-')
end
initial_setup_for_instances_form_vars(id[1])
set_instances_form_vars
@in_a_form = true
session[:changed] = @changed = false
replace_right_cell
end
def edit_method
assert_privileges("miq_ae_method_edit")
obj = find_checked_items
if obj.present?
@sb[:row_selected] = obj[0]
id = @sb[:row_selected].split('-')
else
id = x_node.split('-')
end
@ae_method = find_record_with_rbac(MiqAeMethod, id[1])
@selectable_methods = embedded_method_regex(@ae_method.fqname)
if playbook_style_location?(@ae_method.location)
# these variants are implemented in Angular
angular_form_specific_data
@right_cell_text = _("Editing Automate Method \"%{name}\"") % {:name => @ae_method.name}
else
# other variants are implemented server side
set_method_form_vars
@in_a_form = true
end
session[:changed] = @changed = false
replace_right_cell
end
# Set form variables for edit
def set_instances_form_vars
session[:inst_data] = {}
@edit = {
:ae_inst_id => @ae_inst.id,
:ae_class_id => @ae_class.id,
:rec_id => @ae_inst.id || nil,
:key => "aeinst_edit__#{@ae_inst.id || "new"}",
:new => {}
}
@edit[:new][:ae_inst] = {}
instance_column_names.each do |fld|
@edit[:new][:ae_inst][fld] = @ae_inst.send(fld)
end
@edit[:new][:ae_values] = @ae_values.collect do |ae_value|
value_column_names.each_with_object({}) do |fld, hash|
hash[fld] = ae_value.send(fld)
end
end
@edit[:new][:ae_fields] = @ae_class.ae_fields.collect do |ae_field|
field_column_names.each_with_object({}) do |fld, hash|
hash[fld] = ae_field.send(fld)
end
end
@edit[:current] = copy_hash(@edit[:new])
@right_cell_text = if @edit[:rec_id].nil?
_("Adding a new Automate Instance")
else
_("Editing Automate Instance \"%{name}\"") % {:name => @ae_inst.name}
end
session[:edit] = @edit
end
# AJAX driven routine to check for changes in ANY field on the form
def form_instance_field_changed
return unless load_edit("aeinst_edit__#{params[:id]}", "replace_cell__explorer")
get_instances_form_vars
javascript_miq_button_visibility(@edit[:current] != @edit[:new])
end
def update_instance
assert_privileges("miq_ae_instance_edit")
return unless load_edit("aeinst_edit__#{params[:id]}", "replace_cell__explorer")
get_instances_form_vars
@changed = (@edit[:new] != @edit[:current])
case params[:button]
when "cancel"
@sb[:action] = session[:edit] = nil # clean out the saved info
add_flash(_("Edit of Automate Instance \"%{name}\" was cancelled by the user") % {:name => @ae_inst.name})
@in_a_form = false
replace_right_cell
when "save"
if @edit[:new][:ae_inst]["name"].blank?
add_flash(_("Name is required"), :error)
end
if @flash_array
javascript_flash
return
end
set_instances_record_vars(@ae_inst) # Set the instance record variables, but don't save
# Update the @ae_inst.ae_values directly because of update bug in RAILS
# When saving a parent, the childrens updates are not getting saved
set_instances_value_vars(@ae_values, @ae_inst) # Set the instance record variables, but don't save
begin
MiqAeInstance.transaction do
@ae_inst.ae_values.each { |v| v.value = nil if v.value == "" }
@ae_inst.save!
end
rescue StandardError => bang
add_flash(_("Error during 'save': %{error_message}") % {:error_message => bang.message}, :error)
@in_a_form = true
javascript_flash
else
AuditEvent.success(build_saved_audit(@ae_class, @edit))
@sb[:action] = session[:edit] = nil # clean out the saved info
@in_a_form = false
add_flash(_("Automate Instance \"%{name}\" was saved") % {:name => @ae_inst.name})
replace_right_cell(:replace_trees => [:ae])
nil
end
when "reset"
set_instances_form_vars
add_flash(_("All changes have been reset"), :warning)
@in_a_form = true
@button = "reset"
replace_right_cell
end
end
def create_instance
assert_privileges("miq_ae_instance_new")
case params[:button]
when "cancel"
@sb[:action] = session[:edit] = nil # clean out the saved info
add_flash(_("Add of new Automate Instance was cancelled by the user"))
@in_a_form = false
replace_right_cell
when "add"
return unless load_edit("aeinst_edit__new", "replace_cell__explorer")
get_instances_form_vars
if @edit[:new][:ae_inst]["name"].blank?
add_flash(_("Name is required"), :error)
end
if @flash_array
javascript_flash
return
end
add_aeinst = MiqAeInstance.new
set_instances_record_vars(add_aeinst) # Set the instance record variables, but don't save
set_instances_value_vars(@ae_values) # Set the instance value record variables, but don't save
begin
MiqAeInstance.transaction do
add_aeinst.ae_values = @ae_values
add_aeinst.ae_values.each { |v| v.value = nil if v.value == "" }
add_aeinst.save!
end
rescue StandardError => bang
@in_a_form = true
render_flash(_("Error during 'add': %{message}") % {:message => bang.message}, :error)
else
AuditEvent.success(build_created_audit(add_aeinst, @edit))
add_flash(_("Automate Instance \"%{name}\" was added") % {:name => add_aeinst.name})
@in_a_form = false
add_active_node_to_open_nodes
replace_right_cell(:replace_trees => [:ae])
nil
end
end
end
# Set form variables for edit
def set_form_vars
@in_a_form_props = true
session[:field_data] = {}
@edit = {}
session[:edit] = {}
@edit[:ae_class_id] = @ae_class.id
@edit[:new] = {}
@edit[:current] = {}
@edit[:new_field] = {}
@edit[:rec_id] = @ae_class.id || nil
@edit[:key] = "aeclass_edit__#{@ae_class.id || "new"}"
@edit[:new][:name] = @ae_class.name
@edit[:new][:display_name] = @ae_class.display_name
@edit[:new][:description] = @ae_class.description
@edit[:new][:namespace] = @ae_class.namespace
@edit[:new][:inherits] = @ae_class.inherits
@edit[:inherits_from] = MiqAeClass.all.collect { |c| [c.fqname, c.fqname] }
@edit[:current] = @edit[:new].dup
@right_cell_text = if @edit[:rec_id].nil?
_("Adding a new Automate Class")
else
_("Editing Automate Class \"%{name}\"") % {:name => @ae_class.name}
end
session[:edit] = @edit
@in_a_form = true
end
# Set form variables for edit
def fields_set_form_vars
@in_a_form_fields = true
session[:field_data] = {}
@edit = {
:ae_class_id => @ae_class.id,
:rec_id => @ae_class.id,
:new_field => {},
:key => "aefields_edit__#{@ae_class.id || "new"}",
:fields_to_delete => []
}
@edit[:new] = {
:datatypes => build_dtype_options, # setting dtype combo for adding a new field
:aetypes => build_type_options # setting aetype combo for adding a new field
}
@edit[:new][:fields] = @ae_class.ae_fields.sort_by { |a| [a.priority.to_i] }.collect do |fld|
field_attributes.each_with_object({}) do |column, hash|
hash[column] = fld.send(column)
end
end
# combo to show existing fields
@combo_xml = build_type_options
# passing in fields because that's how many combo boxes we need
@dtype_combo_xml = build_dtype_options
@edit[:current] = copy_hash(@edit[:new])
@right_cell_text = if @edit[:rec_id].nil?
_("Adding a new Class Schema")
else
_("Editing Class Schema \"%{name}\"") % {:name => @ae_class.name}
end
session[:edit] = @edit
end
# Set form variables for edit
def set_method_form_vars
session[:field_data] = {}
@ae_class = ae_class_for_instance_or_method(@ae_method)
@edit = {}
session[:edit] = {}
@edit[:ae_method_id] = @ae_method.id
@edit[:fields_to_delete] = []
@edit[:new] = {}
@edit[:new_field] = {}
@edit[:ae_class_id] = @ae_class.id
@edit[:rec_id] = @ae_method.id || nil
@edit[:key] = "aemethod_edit__#{@ae_method.id || "new"}"
@sb[:squash_state] ||= true
@edit[:new][:name] = @ae_method.name
@edit[:new][:display_name] = @ae_method.display_name
@edit[:new][:scope] = "instance"
@edit[:new][:language] = "ruby"
@edit[:new][:available_expression_objects] = MiqAeMethod.available_expression_objects.sort
@edit[:new][:location] = @ae_method.location
if @edit[:new][:location] == "expression"
expr_hash = YAML.load(@ae_method.data)
if expr_hash[:db] && expr_hash[:expression]
@edit[:new][:expression] = expr_hash[:expression]
expression_setup(expr_hash[:db])
end
else
@edit[:new][:data] = @ae_method.data.to_s
end
@edit[:new][:data] = @ae_method.data.to_s
@edit[:default_verify_status] = @edit[:new][:location] == "inline" && @edit[:new][:data] && @edit[:new][:data] != ""
@edit[:new][:fields] = @ae_method.inputs.collect do |input|
method_input_column_names.each_with_object({}) do |column, hash|
hash[column] = input.send(column)
end
end
@edit[:new][:available_datatypes] = MiqAeField.available_datatypes_for_ui
@edit[:new][:embedded_methods] = @ae_method.embedded_methods
@edit[:current] = copy_hash(@edit[:new])
@right_cell_text = if @edit[:rec_id].nil?
_("Adding a new Automate Method")
else
_("Editing Automate Method \"%{name}\"") % {:name => @ae_method.name}
end
session[:log_depot_default_verify_status] = false
session[:edit] = @edit
session[:changed] = @changed = false
end
def expression_setup(db)
@edit[:expression_method] = true
@edit[:new][:exp_object] = db
if params[:exp_object] || params[:cls_exp_object]
session[:adv_search] = nil
@edit[@expkey] = @edit[:new][@expkey] = nil
end
adv_search_build(db)
end
def expression_cleanup
@edit[:expression_method] = false
end
def ae_class_for_instance_or_method(record)
record.id ? record.ae_class : MiqAeClass.find(x_node.split("-").last)
end
def validate_method_data
return unless load_edit("aemethod_edit__#{params[:id]}", "replace_cell__explorer")
@edit[:new][:data] = params[:cls_method_data] if params[:cls_method_data]
@edit[:new][:data] = params[:method_data] if params[:method_data]
res = MiqAeMethod.validate_syntax(@edit[:new][:data])
line = 0
if !res
add_flash(_("Data validated successfully"))
else
res.each do |err|
line = err[0] if line.zero?
add_flash(_("Error on line %{line_num}: %{err_txt}") % {:line_num => err[0], :err_txt => err[1]}, :error)
end
end
render :update do |page|
page << javascript_prologue
page << "if (miqDomElementExists('cls_method_data')){"
page << "var ta = document.getElementById('cls_method_data');"
page << "} else {"
page << "var ta = document.getElementById('method_data');"
page << "}"
page.replace("flash_msg_div", :partial => "layouts/flash_msg")
page << "var lineHeight = ta.clientHeight / ta.rows;"
page << "ta.scrollTop = (#{line.to_i}-1) * lineHeight;"
if line.positive?
if @sb[:row_selected]
page << "$('#cls_method_data_lines').scrollTop(ta.scrollTop);"
page << "$('#cls_method_data').scrollTop(ta.scrollTop);"
else
page << "$('#method_data_lines').scrollTop(ta.scrollTop);"
page << "$('#method_data').scrollTop(ta.scrollTop);"
end
end
end
end
# AJAX driven routine to check for changes in ANY field on the form
def form_field_changed
return unless load_edit("aeclass_edit__#{params[:id]}", "replace_cell__explorer")
get_form_vars
javascript_miq_button_visibility(@edit[:new] != @edit[:current])
end
# AJAX driven routine to check for changes in ANY field on the form
def fields_form_field_changed
return unless load_edit("aefields_edit__#{params[:id]}", "replace_cell__explorer")
fields_get_form_vars
@changed = (@edit[:new] != @edit[:current])
render :update do |page|
page << javascript_prologue
unless %w[up down].include?(params[:button])
if params[:field_datatype] == "password"
page << javascript_hide("field_default_value")
page << javascript_show("field_password_value")
page << "$('#field_password_value').val('');"
session[:field_data][:default_value] =
@edit[:new_field][:default_value] = ''
elsif params[:field_datatype]
page << javascript_hide("field_password_value")
page << javascript_show("field_default_value")
page << "$('#field_default_value').val('');"
session[:field_data][:default_value] =
@edit[:new_field][:default_value] = ''
end
params.each do |field, _value|
next unless field.to_s.starts_with?("fields_datatype")
f = field.split('fields_datatype')
def_field = "fields_default_value_" << f[1].to_s
pwd_field = "fields_password_value_" << f[1].to_s
if @edit[:new][:fields][f[1].to_i]['datatype'] == "password"
page << javascript_hide(def_field)
page << javascript_show(pwd_field)
page << "$('##{pwd_field}').val('');"
else
page << javascript_hide(pwd_field)
page << javascript_show(def_field)
page << "$('##{def_field}').val('');"
end
@edit[:new][:fields][f[1].to_i]['default_value'] = nil
end
end
page << javascript_for_miq_button_visibility_changed(@changed)
end
end
# AJAX driven routine to check for changes in ANY field on the form
def form_method_field_changed
return unless load_edit("aemethod_edit__#{params[:id]}", "replace_cell__explorer")
get_method_form_vars
if @edit[:new][:location] == 'expression'
@edit[:new][:exp_object] ||= @edit[:new][:available_expression_objects].first
exp_object = params[:cls_exp_object] || params[:exp_object] || @edit[:new][:exp_object]
expression_setup(exp_object) if exp_object
else
expression_cleanup
end
if row_selected_in_grid?
@refresh_div = "class_methods_div"
@refresh_partial = "class_methods"
@field_name = "cls_method"
else
@refresh_div = "method_inputs_div"
@refresh_partial = "method_inputs"
@field_name = "method"
end
if @edit[:current][:location] == "inline" && @edit[:current][:data]
@edit[:method_prev_data] = @edit[:current][:data]
end
@edit[:new][:data] = if @edit[:new][:location] == "inline" && !params[:cls_method_data] && !params[:method_data] && !params[:transOne]
if !@edit[:method_prev_data]
MiqAeMethod.default_method_text
else
@edit[:method_prev_data]
end
elsif params[:cls_method_location] || params[:method_location]
# reset data if location is changed
''
else
@edit[:new][:data]
end
@changed = (@edit[:new] != @edit[:current])
@edit[:default_verify_status] = %w[builtin inline].include?(@edit[:new][:location]) && @edit[:new][:data] && @edit[:new][:data] != ""
in_angular = playbook_style_location?(@edit[:new][:location])
angular_form_specific_data if in_angular
render :update do |page|
page << javascript_prologue
page.replace_html('form_div', :partial => 'method_form', :locals => {:prefix => ""}) if @edit[:new][:location] == 'expression'
if in_angular
page.replace_html(
@refresh_div,
:partial => 'angular_method_form',
:locals => {:location => @edit[:new][:location]}
)
page << javascript_hide("form_buttons_div")
elsif @refresh_div && (params[:cls_method_location] || params[:exp_object] || params[:cls_exp_object])
page.replace_html(@refresh_div, :partial => @refresh_partial)
end
if params[:cls_field_datatype]
if session[:field_data][:datatype] == "password"
page << javascript_hide("cls_field_default_value")
page << javascript_show("cls_field_password_value")
page << "$('#cls_field_password_value').val('');"
else
page << javascript_hide("cls_field_password_value")
page << javascript_show("cls_field_default_value")
page << "$('#cls_field_default_value').val('');"
end
end
if params[:method_field_datatype]
if session[:field_data][:datatype] == "password"
page << javascript_hide("method_field_default_value")
page << javascript_show("method_field_password_value")
page << "$('#method_field_password_value').val('');"
else
page << javascript_hide("method_field_password_value")
page << javascript_show("method_field_default_value")
page << "$('#method_field_default_value').val('');"
end
end
params.each do |field, _value|
if field.to_s.starts_with?("cls_fields_datatype_")
f = field.split('cls_fields_datatype_')
def_field = "cls_fields_value_" << f[1].to_s
pwd_field = "cls_fields_password_value_" << f[1].to_s
elsif field.to_s.starts_with?("fields_datatype_")
f = field.split('fields_datatype_')
def_field = "fields_value_" << f[1].to_s
pwd_field = "fields_password_value_" << f[1].to_s
end
next unless f
if @edit[:new][:fields][f[1].to_i]['datatype'] == "password"
page << javascript_hide(def_field)
page << javascript_show(pwd_field)
page << "$('##{pwd_field}').val('');"
else
page << javascript_hide(pwd_field)
page << javascript_show(def_field)
page << "$('##{def_field}').val('');"
end
@edit[:new][:fields][f[1].to_i]['default_value'] = nil
end
if @edit[:default_verify_status] != session[:log_depot_default_verify_status]
session[:log_depot_default_verify_status] = @edit[:default_verify_status]
page << if @edit[:default_verify_status]
"miqValidateButtons('show', 'default_');"
else
"miqValidateButtons('hide', 'default_');"
end
end
page << javascript_for_miq_button_visibility_changed(@changed)
page << "miqSparkle(false)"
end
end
def method_form_fields
assert_privileges("miq_ae_method_edit")
if params[:id] == 'new'
method = MiqAeMethod.new
location = params['location'] || 'playbook'
else
method = MiqAeMethod.find(params[:id])
location = method.location
end
if %w[ansible_job_template ansible_workflow_template].include?(location)
# ManageIQ::Providers::AnsibleTower::Provider.where('zone_id != ?', Zone.maintenance_zone.id)
list_of_managers = ManageIQ::Providers::AnsibleTower::AutomationManager
.where(:enabled => true)
.pluck(:id, :name)
.map { |r| {:id => r[0], :name => r[1]} }
if method&.options[:ansible_template_id]
manager_id = ManageIQ::Providers::ExternalAutomationManager::ConfigurationScript
.find_by(:id => method.options[:ansible_template_id])&.manager_id
end
end
method_hash = {
:name => method.name,
:display_name => method.display_name,
:namespace_path => @sb[:namespace_path],
:class_id => method.id ? method.class_id : MiqAeClass.find(x_node.split("-").last).id,
:location => location,
:location_fancy_name => location_fancy_name(location),
:language => 'ruby',
:scope => "instance",
:managers => list_of_managers,
:manager_id => manager_id,
:available_datatypes => MiqAeField.available_datatypes_for_ui,
:config_info => {
:repository_id => method.options[:repository_id] || '',
:ansible_template_id => method.options[:ansible_template_id] || '',
:playbook_id => method.options[:playbook_id] || '',
:credential_id => method.options[:credential_id] || '',
:vault_credential_id => method.options[:vault_credential_id] || '',
:network_credential_id => method.options[:network_credential_id] || '',
:cloud_credential_id => method.options[:cloud_credential_id] || '',
:verbosity => method.options[:verbosity],
:become_enabled => method.options[:become_enabled] || false,
:execution_ttl => method.options[:execution_ttl] || '',
:hosts => method.options[:hosts] || 'localhost',
:log_output => method.options[:log_output] || 'on_error',
:extra_vars => playbook_style_location?(location) && method.inputs
}
}
render :json => method_hash
end
# Button handler for the Automate Class properties edit form.
# Routes on params[:button]: "cancel" discards the session edit, "save"
# persists via RBAC-checked lookup inside a transaction, "reset" restores
# the form, and anything else just toggles the save/reset button state.
def update
  assert_privileges("miq_ae_class_edit")
  return unless load_edit("aeclass_edit__#{params[:id]}", "replace_cell__explorer")
  get_form_vars
  @changed = (@edit[:new] != @edit[:current])
  case params[:button]
  when "cancel"
    @sb[:action] = session[:edit] = nil # clean out the saved info
    add_flash(_("Edit of Automate Class \"%{name}\" was cancelled by the user") % {:name => @ae_class.name})
    @in_a_form = false
    replace_right_cell
  when "save"
    ae_class = find_record_with_rbac(MiqAeClass, params[:id])
    set_record_vars(ae_class) # Set the record variables, but don't save
    begin
      MiqAeClass.transaction do
        ae_class.save!
      end
    rescue StandardError => bang
      add_flash(_("Error during 'save': %{error_message}") % {:error_message => bang.message}, :error)
      session[:changed] = @changed
      @changed = true
      javascript_flash
    else
      add_flash(_("Automate Class \"%{name}\" was saved") % {:name => ae_class.fqname})
      AuditEvent.success(build_saved_audit(ae_class, @edit))
      @sb[:action] = session[:edit] = nil # clean out the saved info
      @in_a_form = false
      replace_right_cell(:replace_trees => [:ae])
      nil
    end
  when "reset"
    set_form_vars
    session[:changed] = @changed = false
    add_flash(_("All changes have been reset"), :warning)
    @button = "reset"
    replace_right_cell
  else
    @changed = session[:changed] = (@edit[:new] != @edit[:current])
    replace_right_cell(:replace_trees => [:ae])
  end
end
# Button handler for the class schema (fields) edit form: cancel/save/reset,
# with the fallthrough branch toggling button state for unsaved changes.
# NOTE(review): unlike the sibling handlers this one has no assert_privileges
# call -- confirm whether that is intentional (the edit session it loads was
# created under a privileged action).
def update_fields
  return unless load_edit("aefields_edit__#{params[:id]}", "replace_cell__explorer")
  fields_get_form_vars
  @changed = (@edit[:new] != @edit[:current])
  case params[:button]
  when "cancel"
    @sb[:action] = session[:edit] = nil # clean out the saved info
    add_flash(_("Edit of schema for Automate Class \"%{name}\" was cancelled by the user") % {:name => @ae_class.name})
    @in_a_form = false
    replace_right_cell
  when "save"
    ae_class = find_record_with_rbac(MiqAeClass, params[:id])
    begin
      MiqAeClass.transaction do
        set_field_vars(ae_class)
        # Fields queued for removal during the edit session are destroyed here
        ae_class.ae_fields.destroy(MiqAeField.where(:id => @edit[:fields_to_delete]))
        # Blank default values are stored as NULL rather than empty strings
        ae_class.ae_fields.each { |fld| fld.default_value = nil if fld.default_value == "" }
        ae_class.save!
      end
    rescue StandardError => bang
      add_flash(_("Error during 'save': %{error_message}") % {:error_message => bang.message}, :error)
      session[:changed] = @changed = true
      javascript_flash
    else
      add_flash(_("Schema for Automate Class \"%{name}\" was saved") % {:name => ae_class.name})
      AuditEvent.success(build_saved_audit(ae_class, @edit))
      @sb[:action] = session[:edit] = nil # clean out the saved info
      @in_a_form = false
      replace_right_cell(:replace_trees => [:ae])
      nil
    end
  when "reset"
    fields_set_form_vars
    session[:changed] = @changed = false
    add_flash(_("All changes have been reset"), :warning)
    @button = "reset"
    @in_a_form = true
    replace_right_cell
  else
    @changed = session[:changed] = (@edit[:new] != @edit[:current])
    replace_right_cell(:replace_trees => [:ae])
  end
end
# Save handler for the namespace edit form (Angular-driven; there is no
# button routing here -- cancel is handled client-side). Captures the
# pre-save attributes so the audit event can record the change.
def update_namespace
  assert_privileges("miq_ae_namespace_edit")
  return unless load_edit("aens_edit__#{params[:id]}", "replace_cell__explorer")
  ae_ns = find_record_with_rbac(MiqAeNamespace, params[:id])
  old_namespace_attributes = ae_ns.attributes.clone
  namespace_set_record_vars(ae_ns) # Set the record variables, but don't save
  begin
    ae_ns.save!
  rescue StandardError => bang
    add_flash(_("Error during 'save': %{message}") % {:message => bang.message}, :error)
    javascript_flash(:spinner_off => true)
  else
    add_flash(_("%{model} \"%{name}\" was saved") % {:model => ui_lookup(:model => @edit[:typ]), :name => get_record_display_name(ae_ns)})
    AuditEvent.success(build_saved_audit_hash_angular(old_namespace_attributes, ae_ns, false))
    @sb[:action] = session[:edit] = nil # clean out the saved info
    @in_a_form = false
    replace_right_cell(:replace_trees => [:ae])
  end
end
# Flash the appropriate cancellation message for the Angular method form
# (edit of an existing method vs. add of a new one) and re-render the cell.
def add_update_method_cancel
  if params[:id] && params[:id] != "new"
    method = find_record_with_rbac(MiqAeMethod, params[:id])
    add_flash(_("Edit of Automate Method \"%{name}\" was cancelled by the user") % {:name => method.name})
  else
    add_flash(_("Add of Automate Method was cancelled by the user"))
  end
  replace_right_cell
end
# Create or update a playbook-style Automate Method from the Angular form
# params; replaces its input fields with the submitted extra_vars inside a
# single transaction so a failed save leaves the inputs untouched.
def add_update_method_add
  method = params[:id] != "new" ? find_record_with_rbac(MiqAeMethod, params[:id]) : MiqAeMethod.new
  method.name = params["name"]
  method.display_name = params["display_name"]
  method.location = params["location"]
  method.language = params["language"]
  method.scope = params["scope"]
  method.class_id = params[:class_id]
  method.options = set_playbook_data
  begin
    MiqAeMethod.transaction do
      to_save, to_delete = playbook_inputs(method)
      method.inputs.destroy(MiqAeField.where(:id => to_delete))
      method.inputs = to_save
      method.save!
    end
  rescue StandardError => bang
    add_flash(_("Error during 'save': %{error_message}") % {:error_message => bang.message}, :error)
    javascript_flash
  else
    # NOTE(review): attributes are cloned *after* the save, so the audit diff
    # only reflects the post-save state -- confirm this is intended.
    old_method_attributes = method.attributes.clone
    add_flash(_('Automate Method "%{name}" was saved') % {:name => method.name})
    AuditEvent.success(build_saved_audit_hash_angular(old_method_attributes, method, params[:button] == "add"))
    replace_right_cell(:replace_trees => [:ae])
    nil
  end
end
# Dispatch the Angular method-form submit: "cancel" aborts the edit,
# "add"/"save" both funnel into the create-or-update path. Any other
# button value is ignored (returns nil), matching the form's contract.
def add_update_method
  assert_privileges("miq_ae_method_edit")
  button = params[:button]
  if button == "cancel"
    add_update_method_cancel
  elsif %w[add save].include?(button)
    add_update_method_add
  end
end
# Button handler for the classic (non-Angular) method edit form:
# cancel/save/reset routing, with expression validation when the method's
# location is 'expression' and embedded-method syncing when 'inline'.
def update_method
  assert_privileges("miq_ae_method_edit")
  return unless load_edit("aemethod_edit__#{params[:id]}", "replace_cell__explorer")
  get_method_form_vars
  @changed = (@edit[:new] != @edit[:current])
  case params[:button]
  when "cancel"
    @sb[:action] = session[:edit] = nil # clean out the saved info
    add_flash(_("Edit of Automate Method \"%{name}\" was cancelled by the user") % {:name => @ae_method.name})
    @in_a_form = false
    replace_right_cell
  when "save"
    # dont allow save if expression has not been added or existing one has been removed
    validate_expression("save") if @edit[:new][:location] == 'expression'
    return if flash_errors?
    ae_method = find_record_with_rbac(MiqAeMethod, params[:id])
    set_method_record_vars(ae_method) # Set the record variables, but don't save
    begin
      MiqAeMethod.transaction do
        set_input_vars(ae_method)
        # Inputs queued for removal during the edit session are destroyed here
        ae_method.inputs.destroy(MiqAeField.where(:id => @edit[:fields_to_delete]))
        # Blank default values are stored as NULL rather than empty strings
        ae_method.inputs.each { |fld| fld.default_value = nil if fld.default_value == "" }
        ae_method.embedded_methods = @edit[:new][:embedded_methods] if @edit[:new][:location] == 'inline'
        ae_method.save!
      end
    rescue StandardError => bang
      add_flash(_("Error during 'save': %{error_message}") % {:error_message => bang.message}, :error)
      session[:changed] = @changed
      @changed = true
      javascript_flash
    else
      add_flash(_("Automate Method \"%{name}\" was saved") % {:name => ae_method.name})
      AuditEvent.success(build_saved_audit(ae_method, @edit))
      @sb[:action] = session[:edit] = nil # clean out the saved info
      @in_a_form = false
      replace_right_cell(:replace_trees => [:ae])
      nil
    end
  when "reset"
    set_method_form_vars
    session[:changed] = @changed = false
    @in_a_form = true
    add_flash(_("All changes have been reset"), :warning)
    @button = "reset"
    replace_right_cell
  else
    @changed = session[:changed] = (@edit[:new] != @edit[:current])
    replace_right_cell
  end
end
# Show the "add Automate Class" form with a fresh, unsaved record.
def new
  assert_privileges("miq_ae_class_new")
  @ae_class = MiqAeClass.new
  set_form_vars
  @in_a_form = true
  replace_right_cell
end
# Show the "add Automate Instance" form; nil id means a fresh record.
def new_instance
  assert_privileges("miq_ae_instance_new")
  initial_setup_for_instances_form_vars(nil)
  set_instances_form_vars
  @in_a_form = true
  replace_right_cell
end
# Show the "add Automate Method" form with a fresh, unsaved record.
def new_method
  assert_privileges("miq_ae_method_new")
  @ae_method = MiqAeMethod.new
  set_method_form_vars
  @in_a_form = true
  replace_right_cell
end
# Button handler for the "add Automate Class" form: cancel discards,
# "add" saves a new record inside a transaction, anything else re-renders
# with the button state reflecting unsaved changes.
def create
  assert_privileges("miq_ae_class_new")
  return unless load_edit("aeclass_edit__new", "replace_cell__explorer")
  get_form_vars
  @in_a_form = true
  case params[:button]
  when "cancel"
    add_flash(_("Add of new Automate Class was cancelled by the user"))
    @in_a_form = false
    replace_right_cell(:replace_trees => [:ae])
  when "add"
    add_aeclass = MiqAeClass.new
    set_record_vars(add_aeclass) # Set the record variables, but don't save
    begin
      MiqAeClass.transaction do
        add_aeclass.save!
      end
    rescue StandardError => bang
      add_flash(_("Error during 'add': %{error_message}") % {:error_message => bang.message}, :error)
      @in_a_form = true
      javascript_flash
    else
      add_flash(_("Automate Class \"%{name}\" was added") % {:name => add_aeclass.fqname})
      @in_a_form = false
      # Keep the tree expanded down to the newly added node
      add_active_node_to_open_nodes
      replace_right_cell(:replace_trees => [:ae])
    end
  else
    @changed = session[:changed] = (@edit[:new] != @edit[:current])
    replace_right_cell(:replace_trees => [:ae])
  end
end
# Serialize the expression editor state (target database/model plus the
# composed expression) to YAML for storage in the method's data field.
def data_for_expression
  new_values = @edit[:new]
  payload = {
    :db         => new_values[:exp_object],
    :expression => new_values[:expression]
  }
  payload.to_yaml
end
# Button handler for the "add Automate Method" form: cancel discards,
# "add" validates the expression (when applicable) and saves, anything
# else re-renders with button state reflecting unsaved changes.
def create_method
  assert_privileges("miq_ae_method_new")
  @in_a_form = true
  case params[:button]
  when "cancel"
    add_flash(_("Add of new Automate Method was cancelled by the user"))
    @in_a_form = false
    replace_right_cell
  when "add"
    return unless load_edit("aemethod_edit__new", "replace_cell__explorer")
    get_method_form_vars
    # dont allow add if expression has not been added or existing one has been removed
    validate_expression("add") if @edit[:new][:location] == 'expression'
    return if flash_errors?
    add_aemethod = MiqAeMethod.new
    set_method_record_vars(add_aemethod) # Set the record variables, but don't save
    begin
      MiqAeMethod.transaction do
        # First save creates the record id needed before inputs can be attached
        add_aemethod.save!
        set_field_vars(add_aemethod)
        add_aemethod.save!
      end
    rescue StandardError => bang
      add_flash(_("Error during 'add': %{error_message}") % {:error_message => bang.message}, :error)
      @in_a_form = true
      javascript_flash
    else
      add_flash(_("Automate Method \"%{name}\" was added") % {:name => add_aemethod.name})
      @in_a_form = false
      replace_right_cell(:replace_trees => [:ae])
    end
  else
    @changed = session[:changed] = (@edit[:new] != @edit[:current])
    add_active_node_to_open_nodes
    replace_right_cell(:replace_trees => [:ae])
  end
end
# Create a new namespace -- or a new domain under the current tenant when
# the edit type is "MiqAeDomain" -- from the namespace add form. Validation
# failures are flashed per-attribute.
def create_namespace
  assert_privileges("miq_ae_namespace_new")
  return unless load_edit("aens_edit__new", "replace_cell__explorer")
  add_ae_ns = if @edit[:typ] == "MiqAeDomain"
                current_tenant.ae_domains.new
              else
                # Parent namespace id comes from the active tree node ("aen-<id>")
                MiqAeNamespace.new(:parent_id => x_node.split('-')[1])
              end
  namespace_set_record_vars(add_ae_ns) # Set the record variables, but don't save
  if add_ae_ns.valid? && !flash_errors? && add_ae_ns.save
    add_flash(_("%{model} \"%{name}\" was added") % {:model => ui_lookup(:model => add_ae_ns.class.name), :name => get_record_display_name(add_ae_ns)})
    @in_a_form = false
    add_active_node_to_open_nodes
    replace_right_cell(:replace_trees => [:ae])
  else
    add_ae_ns.errors.each do |field, msg|
      add_flash("#{field.to_s.capitalize} #{msg}", :error)
    end
    javascript_flash(:spinner_off => true)
  end
end
# AJAX: prepare a new field row for the class schema form (substitution on
# by default) and re-render the fields grid.
# NOTE(review): the original comment said "classification entry" -- this
# appears to operate on schema fields, not classifications.
def field_select
  fields_get_form_vars
  @combo_xml = build_type_options
  @dtype_combo_xml = build_dtype_options
  session[:field_data] = {}
  # New fields default to substitution enabled
  @edit[:new_field][:substitute] = session[:field_data][:substitute] = true
  @changed = (@edit[:new] != @edit[:current])
  render :update do |page|
    page << javascript_prologue
    page.replace("class_fields_div", :partial => "class_fields")
    page << javascript_for_miq_button_visibility(@changed)
    page << "miqSparkle(false);"
  end
end
# AJAX: accept the in-progress field row into the edit session and
# re-render the schema fields grid with updated button state.
def field_accept
  fields_get_form_vars
  @changed = (@edit[:new] != @edit[:current])
  @combo_xml = build_type_options
  @dtype_combo_xml = build_dtype_options
  render :update do |page|
    page << javascript_prologue
    page.replace("class_fields_div", :partial => "class_fields")
    page << javascript_for_miq_button_visibility(@changed)
    page << "miqSparkle(false);"
  end
end
# AJAX: remove a schema field row from the form. Persisted fields are
# queued in @edit[:fields_to_delete] for destruction on save; unsaved rows
# are simply dropped from the in-memory list.
def field_delete
  fields_get_form_vars
  @combo_xml = build_type_options
  @dtype_combo_xml = build_dtype_options
  if params.key?(:id) && @edit[:fields_to_delete].exclude?(params[:id])
    @edit[:fields_to_delete].push(params[:id])
  end
  @edit[:new][:fields].delete_at(params[:arr_id].to_i)
  @changed = (@edit[:new] != @edit[:current])
  render :update do |page|
    page << javascript_prologue
    page.replace("class_fields_div", :partial => "class_fields")
    page << javascript_for_miq_button_visibility(@changed)
    page << "miqSparkle(false);"
  end
end
# AJAX: prepare a new input-parameter row for the method form and re-render
# the inputs area, focusing the name field appropriate to the context
# (class methods grid vs. standalone method inputs).
def field_method_select
  get_method_form_vars
  @refresh_div = "inputs_div"
  @refresh_partial = "inputs"
  @changed = (@edit[:new] != @edit[:current])
  @in_a_form = true
  render :update do |page|
    page << javascript_prologue
    page.replace_html(@refresh_div, :partial => @refresh_partial)
    if row_selected_in_grid?
      page << javascript_show("class_methods_div")
      page << javascript_focus('cls_field_name')
    else
      page << javascript_show("method_inputs_div")
      page << javascript_focus('field_name')
    end
    page << javascript_for_miq_button_visibility(@changed)
    page << javascript_show("inputs_div")
    page << "miqSparkle(false);"
  end
end
# AJAX: accept the in-progress method input row into the edit session and
# re-render the inputs area for the current context.
def field_method_accept
  get_method_form_vars
  @refresh_div = "inputs_div"
  @refresh_partial = "inputs"
  session[:field_data] = {}
  @changed = (@edit[:new] != @edit[:current])
  @in_a_form = true
  render :update do |page|
    page << javascript_prologue
    page.replace_html(@refresh_div, :partial => @refresh_partial)
    page << if row_selected_in_grid?
              javascript_show("class_methods_div")
            else
              javascript_show("method_inputs_div")
            end
    page << javascript_for_miq_button_visibility(@changed)
    page << javascript_show("inputs_div")
    page << "miqSparkle(false);"
  end
end
# AJAX: remove a method input row. Persisted inputs are queued in
# @edit[:fields_to_delete] for destruction on save; unsaved rows are
# dropped from the in-memory list.
def field_method_delete
  get_method_form_vars
  @refresh_div = "inputs_div"
  @refresh_partial = "inputs"
  if params.key?(:id) && @edit[:fields_to_delete].exclude?(params[:id])
    @edit[:fields_to_delete].push(params[:id])
  end
  @edit[:new][:fields].delete_at(params[:arr_id].to_i)
  @changed = (@edit[:new] != @edit[:current])
  render :update do |page|
    page << javascript_prologue
    page.replace_html(@refresh_div, :partial => @refresh_partial)
    page << if row_selected_in_grid?
              javascript_show("class_methods_div")
            else
              javascript_show("method_inputs_div")
            end
    page << javascript_for_miq_button_visibility(@changed)
    page << javascript_show("inputs_div")
    page << "miqSparkle(false);"
  end
end
# Reorder the selected entries in @edit[:new][hash_key] per the pressed
# up/down button. Returns the move helper's result; falsy when the button
# is neither 'up' nor 'down', which callers treat as a failure and flash.
def handle_up_down_buttons(hash_key, field_name)
  case params[:button]
  when 'up'
    move_selected_fields_up(@edit[:new][hash_key], params[:seq_fields], field_name)
  when 'down'
    move_selected_fields_down(@edit[:new][hash_key], params[:seq_fields], field_name)
  end
end
# AJAX: reorder the class schema fields list via the up/down buttons and
# re-render the sequence form, toggling save/reset on change.
def fields_seq_field_changed
  return unless load_edit("fields_edit__seq", "replace_cell__explorer")
  unless handle_up_down_buttons(:fields_list, _('Fields'))
    render_flash
    return
  end
  render :update do |page|
    page << javascript_prologue
    page.replace('column_lists', :partial => 'fields_seq_form')
    @changed = (@edit[:new] != @edit[:current])
    page << javascript_for_miq_button_visibility(@changed) if @changed
    page << "miqSparkle(false);"
  end
end
# Button handler for the class schema sequence editor: cancel, save
# (re-prioritizes fields by their position in the reordered list), or
# reset/first-entry (builds the edit screen).
def fields_seq_edit
  assert_privileges("miq_ae_field_seq")
  case params[:button]
  when "cancel"
    @sb[:action] = session[:edit] = nil # clean out the saved info
    add_flash(_("Edit of Class Schema Sequence was cancelled by the user"))
    @in_a_form = false
    replace_right_cell
  when "save"
    return unless load_edit("fields_edit__seq", "replace_cell__explorer")
    ae_class = MiqAeClass.find(@edit[:ae_class_id])
    indexed_ae_fields = ae_class.ae_fields.index_by(&:name)
    @edit[:new][:fields_list].each_with_index do |f, i|
      # List entries look like "Display Name (name)" -- extract the bare name
      fname = f.split('(').last.split(')').first # leave display name and parenthesis out
      indexed_ae_fields[fname].try(:priority=, i + 1)
    end
    unless ae_class.save
      flash_validation_errors(ae_class)
      @in_a_form = true
      @changed = true
      javascript_flash
      return
    end
    AuditEvent.success(build_saved_audit(ae_class, @edit))
    add_flash(_("Class Schema Sequence was saved"))
    @sb[:action] = @edit = session[:edit] = nil # clean out the saved info
    @in_a_form = false
    replace_right_cell
  when "reset", nil # Reset or first time in
    id = params[:id] || @edit[:ae_class_id]
    @in_a_form = true
    fields_seq_edit_screen(id)
    if params[:button] == "reset"
      add_flash(_("All changes have been reset"), :warning)
    end
    replace_right_cell
  end
end
# AJAX: reorder the domain priority list via the up/down buttons and
# re-render the priority form.
def priority_form_field_changed
  return unless load_edit(params[:id], "replace_cell__explorer")
  @in_a_form = true
  unless handle_up_down_buttons(:domain_order, _('Domains'))
    render_flash
    return
  end
  render :update do |page|
    page << javascript_prologue
    page.replace('domains_list',
                 :partial => 'domains_priority_form',
                 :locals => {:action => "domains_priority_edit"})
    @changed = (@edit[:new] != @edit[:current])
    page << javascript_for_miq_button_visibility(@changed) if @changed
    page << "miqSparkle(false);"
  end
end
# Button handler for the domain priority order editor. On save, the
# displayed order is reversed (priority is stored lowest-first) and
# applied to the current tenant.
def domains_priority_edit
  assert_privileges("miq_ae_domain_priority_edit")
  case params[:button]
  when "cancel"
    @sb[:action] = @in_a_form = @edit = session[:edit] = nil # clean out the saved info
    add_flash(_("Edit of Priority Order was cancelled by the user"))
    replace_right_cell
  when "save"
    return unless load_edit("priority__edit", "replace_cell__explorer")
    # NOTE: reverse! mutates @edit[:new][:domain_order] in place; the edit
    # session is discarded immediately afterwards so this is harmless here.
    domains = @edit[:new][:domain_order].reverse!.collect do |domain|
      # Entries may carry a " (Locked)" suffix which is stripped for lookup
      MiqAeDomain.find_by(:name => domain.split(' (Locked)').first).id
    end
    current_tenant.reset_domain_priority_by_ordered_ids(domains)
    add_flash(_("Priority Order was saved"))
    @sb[:action] = @in_a_form = @edit = session[:edit] = nil # clean out the saved info
    replace_right_cell(:replace_trees => [:ae])
  when "reset", nil # Reset or first time in
    priority_edit_screen
    add_flash(_("All changes have been reset"), :warning) if params[:button] == "reset"
    session[:changed] = @changed = false
    replace_right_cell
  end
end
# Resolve the ids to be copied. Checked grid rows take precedence over the
# single record in params[:id]. Grid ids have the form "<prefix>-<id>";
# namespace rows ("aen-...") cannot be copied and are filtered out, leaving
# only the trailing id portion of each remaining entry.
def objects_to_copy
  checked = find_checked_items
  return [params[:id]] unless checked
  checked.reject { |item| item.split("-").first == "aen" }
         .map { |item| item.split("-").last }
end
# Map the pressed toolbar button (or the saved sandbox action) to the model
# class being copied, then (re)initialize the copy-objects edit screen.
# Unrecognized actions yield a nil class, matching the original case/when.
def copy_objects_reset(ids)
  action = params[:pressed] || @sb[:action]
  klass = {
    'miq_ae_class_copy'    => MiqAeClass,
    'miq_ae_instance_copy' => MiqAeInstance,
    'miq_ae_method_copy'   => MiqAeMethod
  }[action]
  copy_reset(klass, ids, action)
end
# Entry point for the copy class/instance/method flow. Bails out with an
# error when only namespaces were selected (namespaces cannot be copied),
# otherwise routes on the pressed button (nil means first entry).
def copy_objects
  ids = objects_to_copy
  if ids.blank?
    add_flash(_("Copy does not apply to selected Automate Namespace"), :error)
    @sb[:action] = session[:edit] = nil
    @in_a_form = false
    replace_right_cell
    return
  end
  case params[:button]
  when "cancel" then copy_cancel
  when "copy" then copy_save
  when "reset", nil then copy_objects_reset(ids)
  end
end
# AJAX: a field on the copy-objects form changed; refresh form vars, the
# automate tree, and the form partial when domain or override-source toggled.
def form_copy_objects_field_changed
  return unless load_edit("copy_objects__#{params[:id]}", "replace_cell__explorer")
  copy_objects_get_form_vars
  build_automate_tree(:automate)
  @changed = (@edit[:new] != @edit[:current])
  # With no target namespace chosen, enable the buttons only when copying
  # to the same path (override source)
  @changed = @edit[:new][:override_source] if @edit[:new][:namespace].nil?
  render :update do |page|
    page << javascript_prologue
    page.replace("flash_msg_div", :partial => "layouts/flash_msg")
    page.replace("form_div", :partial => "copy_objects_form") if params[:domain] || params[:override_source]
    page << javascript_for_miq_button_visibility(@changed)
  end
end
# Toggle the embedded automate-tree selector for picking a target namespace;
# on submit, record the chosen node and namespace in the edit session.
def ae_tree_select_toggle
  @edit = session[:edit]
  self.x_active_tree = :ae_tree
  at_tree_select_toggle(:automate, :namespace)
  if params[:button] == 'submit'
    x_node_set(@edit[:active_id], :automate_tree)
    @edit[:namespace] = @edit[:new][:namespace]
  end
  session[:edit] = @edit
end
# AJAX: add the tree-selected method (URL-encoded fqname) to the inline
# method's embedded-methods list and re-render the panel.
def embedded_methods_add
  submit_embedded_method(CGI.unescape(params[:fqname]))
  # Recompute which tree entries remain selectable for this method
  @selectable_methods = embedded_method_regex(MiqAeMethod.find(@edit[:ae_method_id]).fqname) if @edit[:ae_method_id]
  @changed = (@edit[:new] != @edit[:current])
  render :update do |page|
    page << javascript_prologue
    page << javascript_show("flash_msg_div")
    page << javascript_for_miq_button_visibility(@changed)
    page.replace("flash_msg_div", :partial => "layouts/flash_msg")
    page.replace("embedded_methods_div", :partial => "embedded_methods")
  end
end
# AJAX: remove the embedded method at the given list index and re-render.
def embedded_methods_remove
  @edit[:new][:embedded_methods].delete_at(params[:id].to_i)
  @selectable_methods = embedded_method_regex(MiqAeMethod.find(@edit[:ae_method_id]).fqname) if @edit[:ae_method_id]
  @changed = (@edit[:new] != @edit[:current])
  render :update do |page|
    page << javascript_prologue
    page << javascript_for_miq_button_visibility(@changed)
    page.replace("embedded_methods_div", :partial => "embedded_methods")
    page << "miqSparkle(false);"
  end
end
# Handle a node click in the embedded namespace-picker tree, persisting the
# updated edit session.
def ae_tree_select
  @edit = session[:edit]
  at_tree_select(:namespace)
  session[:edit] = @edit
end
# Resolve a tree node id of the form "<prefix>-<id>" to its record via the
# prefix-to-model mapping, then delegate to the generic tree_select handler.
def x_show
  typ, id = params[:id].split("-")
  @record = TreeBuilder.get_model_for_prefix(typ).constantize.find(id)
  tree_select
end
# Re-import a git-backed domain from the chosen repo/branch-or-tag for the
# current tenant; any button other than "save" cancels.
def refresh_git_domain
  if params[:button] == "save"
    begin
      git_based_domain_import_service.import(params[:git_repo_id], params[:git_branch_or_tag], current_tenant.id)
      add_flash(_("Successfully refreshed!"), :info)
    rescue MiqException::Error => err
      add_flash(err.message, :error)
    end
  else
    add_flash(_("Git based refresh canceled"), :info)
  end
  session[:edit] = nil
  @in_a_form = false
  replace_right_cell(:replace_trees => [:ae])
end
# JSON endpoint: return the editable properties of a namespace (name,
# description, enabled) for the Angular edit form.
def namespace
  assert_privileges("miq_ae_namespace_edit")
  render :json => find_record_with_rbac(MiqAeNamespace, params[:id]).attributes.slice('name', 'description', 'enabled')
end
private
# Builds a regular expression that controls the selectable items in the ae_methods tree:
# an alternation of the ids of every same-named (homonymic) method across
# the user's visible domains, e.g. "(12)|(34)".
def embedded_method_regex(fqname)
  ids = MiqAeMethod.get_homonymic_across_domains(current_user, fqname).map { |m| "(#{m.id})" }
  ids.join('|')
end
# Build MiqAeField input rows for a playbook-style method from the
# submitted params[:extra_vars]. Each submitted row appears to be
# [name, default_value, datatype(, field_id)]; a 4-element row updates the
# existing field, otherwise a new one is built -- TODO confirm against the
# Angular form's payload shape.
# Returns [fields_to_save, ids_of_existing_fields_no_longer_submitted].
def playbook_inputs(method)
  existing_inputs = method.inputs
  new_inputs = params[:extra_vars] || []
  inputs_to_save = []
  inputs_to_delete = []
  new_inputs.each do |i, input|
    field = input.length == 4 ? MiqAeField.find_by(:id => input.last) : MiqAeField.new
    field.name = input[0]
    # Blank default values are stored as NULL rather than empty strings
    field.default_value = input[1] == "" ? nil : input[1]
    field.datatype = input[2]
    field.priority = i
    inputs_to_save.push(field)
  end
  # Any persisted input not re-submitted is scheduled for deletion
  existing_inputs.each do |existing_input|
    inputs_to_delete.push(existing_input.id) unless inputs_to_save.any? { |i| i.id == existing_input.id }
  end
  return inputs_to_save, inputs_to_delete
end
# Assemble the options hash for a playbook-style method from the whitelisted
# request params; become_enabled is coerced to a boolean separately.
def set_playbook_data
  params_list = %i[ansible_template_id
                   repository_id
                   playbook_id
                   credential_id
                   vault_credential_id
                   verbosity
                   network_credential_id
                   cloud_credential_id
                   execution_ttl
                   hosts
                   log_output]
  params_hash = copy_params_if_set({}, params, params_list)
  copy_boolean_params(params_hash, params, %i[become_enabled])
end
# Set the instance variables the Angular-driven method form expects
# (current record, owning class, region, and the angular-form flag).
def angular_form_specific_data
  @record = @ae_method
  @ae_class = ae_class_for_instance_or_method(@ae_method)
  @current_region = MiqRegion.my_region.region
  @angular_form = true
end
# Flash an error when the expression editor still contains the "???"
# placeholder key (i.e. no expression element has been composed yet).
# task is "add" or "save" and is interpolated into the message.
def validate_expression(task)
  if @edit[@expkey][:expression]["???"] == "???"
    add_flash(_("Error during '%{task}': Expression element is required") % {:task => _(task)}, :error)
    @in_a_form = true
    javascript_flash
  end
end
# Declare the accordion features for this explorer: a single Datastore tree
# gated on the miq_ae_class_explorer role.
def features
  [
    {
      :role     => "miq_ae_class_explorer",
      :role_any => true,
      :name     => :ae,
      :title    => _("Datastore")
    }
  ].map { |hsh| ApplicationController::Feature.new_with_hash(hsh) }
end
# Load (or build) the instance being edited plus one MiqAeValue per schema
# field of its class, ordered by field priority, for the instance form.
def initial_setup_for_instances_form_vars(ae_inst_id)
  @ae_inst = ae_inst_id ? MiqAeInstance.find(ae_inst_id) : MiqAeInstance.new
  @ae_class = ae_class_for_instance_or_method(@ae_inst)
  @ae_values = @ae_class.ae_fields.sort_by { |a| [a.priority.to_i] }.collect do |fld|
    MiqAeValue.find_or_initialize_by(:field_id => fld.id.to_s, :instance_id => @ae_inst.id.to_s)
  end
end
# Attribute names serialized for Automate instance rows.
def instance_column_names
  ['name', 'description', 'display_name']
end
# Attribute names serialized for class schema field rows.
def field_column_names
  ['aetype', 'collect', 'datatype', 'default_value', 'display_name', 'name',
   'on_entry', 'on_error', 'on_exit', 'max_retries', 'max_time', 'substitute']
end
# Attribute names serialized for instance value rows.
def value_column_names
  ['collect', 'display_name', 'on_entry', 'on_error',
   'on_exit', 'max_retries', 'max_time', 'value']
end
# Attribute names serialized for method input-parameter rows.
def method_input_column_names
  ['datatype', 'default_value', 'id', 'name', 'priority']
end
# Pull the copy-objects form fields out of params into the edit session.
# Checkbox params arrive as "1"/"0"; choosing override-source or switching
# domain clears the previously chosen namespace (and name, for domain).
def copy_objects_get_form_vars
  %w[domain override_existing override_source namespace new_name].each do |field|
    fld = field.to_sym
    if %w[override_existing override_source].include?(field)
      @edit[:new][fld] = params[fld] == "1" if params[fld]
      @edit[:new][:namespace] = nil if @edit[:new][:override_source]
    else
      @edit[:new][fld] = params[fld] if params[fld]
      if fld == :domain && params[fld]
        # save domain in sandbox, treebuilder doesnt have access to @edit
        @sb[:domain_id] = params[fld]
        @edit[:new][:namespace] = nil
        @edit[:new][:new_name] = nil
      end
    end
  end
end
# Perform the actual copy of the selected classes/instances/methods into
# the chosen domain/namespace, then reposition the tree on the first
# copied record and tear down the edit session.
def copy_save
  assert_privileges(@sb[:action])
  return unless load_edit("copy_objects__#{params[:id]}", "replace_cell__explorer")
  begin
    @record = @edit[:typ].find(@edit[:rec_id])
    domain = MiqAeDomain.find(@edit[:new][:domain])
    # A name identical to the original means "keep the original name"
    @edit[:new][:new_name] = nil if @edit[:new][:new_name] == @edit[:old_name]
    options = {
      :ids                => @edit[:selected_items].keys,
      :domain             => domain.name,
      :namespace          => @edit[:new][:namespace],
      :overwrite_location => @edit[:new][:override_existing],
      :new_name           => @edit[:new][:new_name],
      :fqname             => @edit[:fqname]
    }
    res = @edit[:typ].copy(options)
  rescue StandardError => bang
    render_flash(_("Error during '%{record} copy': %{error_message}") %
      {:record => ui_lookup(:model => @edit[:typ].to_s), :error_message => bang.message}, :error)
    return
  end
  model = @edit[:selected_items].count > 1 ? :models : :model
  add_flash(_("Copy selected %{record} was saved") % {:record => ui_lookup(model => @edit[:typ].to_s)})
  @record = res.kind_of?(Array) ? @edit[:typ].find(res.first) : res
  self.x_node = "#{TreeBuilder.get_prefix_for_model(@edit[:typ])}-#{@record.id}"
  @in_a_form = @changed = session[:changed] = false
  @sb[:action] = @edit = session[:edit] = nil
  replace_right_cell(:replace_trees => [:ae])
end
# (Re)build the copy-objects edit screen for the given model class and ids;
# flashes a reset notice when triggered by the reset button.
def copy_reset(typ, ids, button_pressed)
  assert_privileges(button_pressed)
  @changed = session[:changed] = @in_a_form = true
  copy_objects_edit_screen(typ, ids, button_pressed)
  if params[:button] == "reset"
    add_flash(_("All changes have been reset"), :warning)
  end
  build_automate_tree(:automate)
  replace_right_cell
end
# Abandon the copy flow: restore the record for display, clear the session
# edit, and flash a cancellation message.
# NOTE(review): @edit is still read after session[:edit] is nilled -- that
# works because @edit references the same hash independently, but the
# ordering is fragile.
def copy_cancel
  assert_privileges(@sb[:action])
  @record = session[:edit][:typ].find_by(:id => session[:edit][:rec_id])
  model = @edit[:selected_items].count > 1 ? :models : :model
  @sb[:action] = session[:edit] = nil # clean out the saved info
  add_flash(_("Copy %{record} was cancelled by the user") % {:record => ui_lookup(model => @edit[:typ].to_s)})
  @in_a_form = false
  replace_right_cell
end
# Build the edit session for the copy form: resolve the selected records
# (RBAC-checked), list the tenant's editable target domains, and seed the
# right-cell title.
def copy_objects_edit_screen(typ, ids, button_pressed)
  domains = {}
  selected_items = {}
  ids.each_with_index do |id, i|
    record = find_record_with_rbac(typ, id)
    selected_items[record.id] = record.display_name.blank? ? record.name : "#{record.display_name} (#{record.name})"
    # The first selected record drives the form's source information
    @record = record if i.zero?
  end
  current_tenant.editable_domains.collect { |domain| domains[domain.id] = domain_display_name(domain) }
  initialize_copy_edit_vars(typ, button_pressed, domains, selected_items)
  @sb[:domain_id] = domains.first.first
  @edit[:current] = copy_hash(@edit[:new])
  model = @edit[:selected_items].count > 1 ? :models : :model
  @right_cell_text = _("Copy %{model}") % {:model => ui_lookup(model => typ.to_s)}
  session[:edit] = @edit
end
# Seed @edit for the copy form: source record metadata plus the default
# form state (first editable domain, copy-to-same-path enabled).
def initialize_copy_edit_vars(typ, button_pressed, domains, selected_items)
  @edit = {
    :typ            => typ,
    :action         => button_pressed,
    :domain_name    => @record.domain.name,
    :domain_id      => @record.domain.id,
    :old_name       => @record.name,
    :fqname         => @record.fqname,
    :rec_id         => @record.id,
    :key            => "copy_objects__#{@record.id}",
    :domains        => domains,
    :selected_items => selected_items,
    :namespaces     => {}
  }
  @edit[:new] = {
    :domain            => domains.first.first,
    :override_source   => true,
    :namespace         => nil,
    :new_name          => nil,
    :override_existing => false
  }
end
# Compute the form-submit action name for the current edit screen: special
# cases for priority/sequence/copy flows, otherwise "create"/"update" plus
# a suffix derived from the active node type (or active tab on a class node).
def create_action_url(node)
  if @sb[:action] == "miq_ae_domain_priority_edit"
    'domains_priority_edit'
  elsif @sb[:action] == 'miq_ae_field_seq'
    'fields_seq_edit'
  elsif MIQ_AE_COPY_ACTIONS.include?(@sb[:action])
    'copy_objects'
  else
    # Presence of a record id distinguishes update from create
    prefix = @edit[:rec_id].nil? ? 'create' : 'update'
    if node == 'aec'
      suffix_hash = {
        'instances' => '_instance',
        'methods'   => '_method',
        'props'     => '',
        'schema'    => '_fields'
      }
      suffix = suffix_hash[@sb[:active_tab]]
    else
      suffix_hash = {
        'root' => '_namespace',
        'aei'  => '_instance',
        'aem'  => '_method',
        # An "aen" node edits a class when a class id is in the session,
        # otherwise a namespace
        'aen'  => @edit.key?(:ae_class_id) ? '' : '_namespace'
      }
      suffix = suffix_hash[node]
    end
    prefix + suffix
  end
end
# Display name for a tree/grid record: display_name when present, else name.
# Locked (non-editable) or disabled domains get a read-only suffix appended.
#
# Fix: reuse the cached +editable_domain+ local instead of calling
# editable_domain?(rec) a second time inside the suffix branch -- the value
# cannot change between the two calls and the repeat lookup is wasted work.
def get_rec_name(rec)
  column = rec.display_name.blank? ? :name : :display_name
  if rec.kind_of?(MiqAeNamespace) && rec.domain?
    editable_domain = editable_domain?(rec)
    enabled_domain = rec.enabled
    unless editable_domain && enabled_domain
      return add_read_only_suffix(rec.send(column), editable_domain, enabled_domain)
    end
  end
  rec.send(column)
end
# Delete all selected or the single displayed Automate class(es)
def deleteclasses
  assert_privileges("miq_ae_class_delete")
  delete_namespaces_or_classes
end
# Run a bulk task (e.g. "destroy") against the given Automate class ids
def process_aeclasses(aeclasses, task)
  process_elements(aeclasses, MiqAeClass, task)
end
# Delete all selected or the single displayed Automate instance(s)
def deleteinstances
  assert_privileges('miq_ae_instance_delete')
  # Checked grid rows ("<prefix>-<id>") win over the active tree node
  ids = if (@sb[:row_selected] = find_checked_items).present?
          @sb[:row_selected].map do |item|
            item.split('-')[1]
          end
        else
          Array.wrap(x_node.split('-')[1])
        end
  instances = find_records_with_rbac(MiqAeInstance, ids)
  # Deleting the displayed record: reposition the tree on its owning class
  self.x_node = "aec-#{instances.first.class_id}" if @sb[:row_selected].nil?
  process_aeinstances(instances.ids, 'destroy')
  replace_right_cell(:replace_trees => [:ae])
end
# Run a bulk task (e.g. "destroy") against the given Automate instance ids
def process_aeinstances(aeinstances, task)
  process_elements(aeinstances, MiqAeInstance, task)
end
# Delete all selected or the single displayed Automate method(s)
def deletemethods
  assert_privileges('miq_ae_method_delete')
  # Checked grid rows ("<prefix>-<id>") win over the active tree node
  ids = if (@sb[:row_selected] = find_checked_items).present?
          @sb[:row_selected].map do |item|
            item.split('-')[1]
          end
        else
          Array.wrap(x_node.split('-')[1])
        end
  methods = find_records_with_rbac(MiqAeMethod, ids)
  # Deleting the displayed record: reposition the tree on its owning class
  self.x_node = "aec-#{methods.first.class_id}" if @sb[:row_selected].nil?
  process_aemethods(methods.ids, 'destroy')
  replace_right_cell(:replace_trees => [:ae])
end
# Run a bulk task (e.g. "destroy") against the given Automate method ids
def process_aemethods(aemethods, task)
  process_elements(aemethods, MiqAeMethod, task)
end
# Delete the displayed domain (params[:id]) or all checked domains.
# Read-only domains are skipped with an error flash; git-backed domains are
# destroyed asynchronously via the queue, the rest synchronously.
def delete_domain
  assert_privileges("miq_ae_domain_delete")
  aedomains = []
  git_domains = []
  if params[:id]
    aedomains.push(params[:id])
    self.x_node = "root"
  else
    selected = find_checked_items
    selected_ids = selected.map { |x| x.split('-')[1] }
    # TODO: replace with RBAC safe method #14665 is merged
    domains = MiqAeDomain.where(:id => selected_ids)
    domains.each do |domain|
      if domain.editable_properties?
        domain.git_enabled? ? git_domains.push(domain) : aedomains.push(domain.id)
      else
        add_flash(_("Read Only Automate Domain \"%{name}\" cannot be deleted") %
          {:name => get_record_display_name(domain)}, :error)
      end
    end
  end
  process_elements(aedomains, MiqAeDomain, 'destroy') unless aedomains.empty?
  git_domains.each do |domain|
    process_element_destroy_via_queue(domain, domain.class, domain.name)
  end
  replace_right_cell(:replace_trees => [:ae])
end
# Delete all selected or the single displayed Automate namespace(s)
def delete_ns
  assert_privileges("miq_ae_namespace_delete")
  delete_namespaces_or_classes
end
# Shared deletion flow for namespaces and classes. Three entry shapes:
# a single displayed namespace (params[:id] with an "aen" node), a set of
# checked grid rows, or the single displayed class. After deleting the
# displayed record the tree is repositioned on its parent.
def delete_namespaces_or_classes
  selected = find_checked_items
  ae_ns = []
  ae_cs = []
  node = x_node.split('-')
  if params[:id] && params[:miq_grid_checks].blank? && node.first == "aen"
    ae_ns.push(params[:id])
    ns = find_record_with_rbac(MiqAeNamespace, node[1])
    self.x_node = ns.parent_id ? "aen-#{ns.parent_id}" : "root"
  elsif selected
    ae_ns, ae_cs = items_to_delete(selected)
  else
    ae_cs.push(node[1])
    cls = find_record_with_rbac(MiqAeClass, node[1])
    self.x_node = "aen-#{cls.namespace_id}"
  end
  process_ae_ns(ae_ns, "destroy") unless ae_ns.empty?
  process_aeclasses(ae_cs, "destroy") unless ae_cs.empty?
  replace_right_cell(:replace_trees => [:ae])
end
# Partition checked grid rows ("<prefix>-<id>") into deletable namespace ids
# and class ids. Locked domains that are not editable are skipped with an
# error flash. Returns [namespace_ids, class_ids].
def items_to_delete(selected)
  ns_list = []
  cs_list = []
  selected.each do |items|
    item = items.split('-')
    if item[0] == "aen"
      record = find_record_with_rbac(MiqAeNamespace, item[1])
      if (record.domain? && record.editable_properties?) || record.editable?
        ns_list.push(item[1])
      else
        add_flash(_("\"%{field}\" Automate Domain cannot be deleted") %
          {:field => get_record_display_name(record)},
                  :error)
      end
    else
      cs_list.push(item[1])
    end
  end
  return ns_list, cs_list
end
# Run a bulk task (e.g. "destroy") against the given Automate namespace ids
def process_ae_ns(ae_ns, task)
  process_elements(ae_ns, MiqAeNamespace, task)
end
# Get variables from edit form
def get_form_vars
@ae_class = MiqAeClass.find_by(:id => @edit[:ae_class_id])
# for class add tab
@edit[:new][:name] = params[:name].presence if params[:name]
@edit[:new][:description] = params[:description].presence if params[:description]
@edit[:new][:display_name] = params[:display_name].presence if params[:display_name]
@edit[:new][:namespace] = params[:namespace] if params[:namespace]
@edit[:new][:inherits] = params[:inherits_from] if params[:inherits_from]
# for class edit tab
@edit[:new][:name] = params[:cls_name].presence if params[:cls_name]
@edit[:new][:description] = params[:cls_description].presence if params[:cls_description]
@edit[:new][:display_name] = params[:cls_display_name].presence if params[:cls_display_name]
@edit[:new][:namespace] = params[:cls_namespace] if params[:cls_namespace]
@edit[:new][:inherits] = params[:cls_inherits_from] if params[:cls_inherits_from]
end
# Common routine to find checked items on a page (checkbox ids are "check_xxx" where xxx is the item id or index)
def find_checked_items(_prefix = nil)
# AE can't use ApplicationController#find_checked_items because that one expects non-prefixed ids
params[:miq_grid_checks].split(",") if params[:miq_grid_checks].present?
end
def field_attributes
%w[aetype class_id collect datatype default_value description
display_name id max_retries max_time message name on_entry
on_error on_exit priority substitute]
end
def row_selected_in_grid?
@sb[:row_selected] || x_node.split('-').first == "aec"
end
helper_method :row_selected_in_grid?
# these are written in angular
def playbook_style_location?(location)
%w[playbook ansible_job_template ansible_workflow_template].include?(location)
end
helper_method :playbook_style_location?
# Get variables from edit form
def fields_get_form_vars
@ae_class = MiqAeClass.find_by(:id => @edit[:ae_class_id])
@in_a_form = true
@in_a_form_fields = true
if params[:item].blank? && !%w[accept save].include?(params[:button]) && params["action"] != "field_delete"
field_data = session[:field_data]
new_field = @edit[:new_field]
field_attributes.each do |field|
field_name = "field_#{field}".to_sym
field_sym = field.to_sym
if field == "substitute"
field_data[field_sym] = new_field[field_sym] = params[field_name] == "1" if params[field_name]
elsif params[field_name]
field_data[field_sym] = new_field[field_sym] = params[field_name]
end
end
field_data[:default_value] = new_field[:default_value] = params[:field_password_value] if params[:field_password_value]
new_field[:priority] = 1
@edit[:new][:fields].each_with_index do |flds, i|
if i == @edit[:new][:fields].length - 1
new_field[:priority] = flds['priority'].nil? ? 1 : flds['priority'].to_i + 1
end
end
new_field[:class_id] = @ae_class.id
@edit[:new][:fields].each_with_index do |fld, i|
field_attributes.each do |field|
field_name = "fields_#{field}_#{i}"
if field == "substitute"
fld[field] = params[field_name] == "1" if params[field_name]
elsif %w[aetype datatype].include?(field)
var_name = "fields_#{field}#{i}"
fld[field] = params[var_name.to_sym] if params[var_name.to_sym]
elsif field == "default_value"
fld[field] = params[field_name] if params[field_name]
fld[field] = params["fields_password_value_#{i}".to_sym] if params["fields_password_value_#{i}".to_sym]
elsif params[field_name]
fld[field] = params[field_name]
end
end
end
elsif params[:button] == "accept"
if session[:field_data][:name].blank? || session[:field_data][:aetype].blank?
field = session[:field_data][:name].blank? ? "Name" : "Type"
field += " and Type" if field == "Name" && session[:field_data][:aetype].blank?
add_flash(_("%{field} is required") % {:field => field}, :error)
return
end
new_fields = {}
field_attributes.each do |field_attribute|
new_fields[field_attribute] = @edit[:new_field][field_attribute.to_sym]
end
@edit[:new][:fields].push(new_fields)
@edit[:new_field] = session[:field_data] = {}
end
end
def method_form_vars_process_fields(prefix = '')
@edit[:new][:fields].each_with_index do |field, i|
method_input_column_names.each do |column|
field[column] = params["#{prefix}fields_#{column}_#{i}".to_sym] if params["#{prefix}fields_#{column}_#{i}".to_sym]
next unless column == 'default_value'
field[column] = params["#{prefix}fields_value_#{i}".to_sym] if params["#{prefix}fields_value_#{i}".to_sym]
field[column] = params["#{prefix}fields_password_value_#{i}".to_sym] if params["#{prefix}fields_password_value_#{i}".to_sym]
end
end
end
# Get variables from edit form
def get_method_form_vars
@ae_method = @edit[:ae_method_id] ? MiqAeMethod.find(@edit[:ae_method_id]) : MiqAeMethod.new
@in_a_form = true
if params[:item].blank? && params[:button] != "accept" && params["action"] != "field_delete"
# for method_inputs view
@edit[:new][:name] = params[:method_name].presence if params[:method_name]
@edit[:new][:display_name] = params[:method_display_name].presence if params[:method_display_name]
@edit[:new][:location] ||= 'inline'
@edit[:new][:location] = params[:method_location] if params[:method_location]
@edit[:new][:data] = params[:method_data] if params[:method_data]
method_form_vars_process_fields
session[:field_data][:name] = @edit[:new_field][:name] = params[:field_name] if params[:field_name]
session[:field_data][:datatype] = @edit[:new_field][:datatype] = params[:field_datatype] if params[:field_datatype]
session[:field_data][:default_value] = @edit[:new_field][:default_value] = params[:field_default_value] if params[:field_default_value]
session[:field_data][:default_value] = @edit[:new_field][:default_value] = params[:field_password_value] if params[:field_password_value]
# for class_methods view
@edit[:new][:name] = params[:cls_method_name].presence if params[:cls_method_name]
@edit[:new][:display_name] = params[:cls_method_display_name].presence if params[:cls_method_display_name]
@edit[:new][:location] = params[:cls_method_location] if params[:cls_method_location]
@edit[:new][:data] = params[:cls_method_data] if params[:cls_method_data]
@edit[:new][:data] += "..." if params[:transOne] && params[:transOne] == "1" # Update the new data to simulate a change
method_form_vars_process_fields('cls_')
session[:field_data][:name] = @edit[:new_field][:name] = params[:cls_field_name] if params[:cls_field_name]
session[:field_data][:datatype] = @edit[:new_field][:datatype] = params[:cls_field_datatype] if params[:cls_field_datatype]
session[:field_data][:default_value] = @edit[:new_field][:default_value] = params[:cls_field_default_value] if params[:cls_field_default_value]
session[:field_data][:default_value] = @edit[:new_field][:default_value] = params[:cls_field_password_value] if params[:cls_field_password_value]
@edit[:new_field][:method_id] = @ae_method.id
session[:field_data] ||= {}
elsif params[:button] == "accept"
if @edit[:new_field].blank? || @edit[:new_field][:name].nil? || @edit[:new_field][:name] == ""
add_flash(_("Name is required"), :error)
return
end
new_field = {}
new_field['name'] = @edit[:new_field][:name]
new_field['datatype'] = @edit[:new_field][:datatype]
new_field['default_value'] = @edit[:new_field][:default_value]
new_field['method_id'] = @ae_method.id
@edit[:new][:fields].push(new_field)
@edit[:new_field] = {
:name => '',
:default_value => '',
:datatype => 'string'
}
elsif params[:add] == 'new'
session[:fields_data] = {
:name => '',
:default_value => '',
:datatype => 'string'
}
end
end
def get_instances_form_vars_for(prefix = nil)
instance_column_names.each do |key|
@edit[:new][:ae_inst][key] = params["#{prefix}inst_#{key}"].presence if params["#{prefix}inst_#{key}"]
end
@ae_class.ae_fields.sort_by { |a| [a.priority.to_i] }.each_with_index do |_fld, i|
%w[value collect on_entry on_exit on_error max_retries max_time].each do |key|
@edit[:new][:ae_values][i][key] = params["#{prefix}inst_#{key}_#{i}".to_sym] if params["#{prefix}inst_#{key}_#{i}".to_sym]
end
@edit[:new][:ae_values][i]["value"] = params["#{prefix}inst_password_value_#{i}".to_sym] if params["#{prefix}inst_password_value_#{i}".to_sym]
end
end
# Get variables from edit form
def get_instances_form_vars
# resetting inst/class/values from id stored in @edit.
@ae_inst = @edit[:ae_inst_id] ? MiqAeInstance.find(@edit[:ae_inst_id]) : MiqAeInstance.new
@ae_class = MiqAeClass.find(@edit[:ae_class_id])
@ae_values = @ae_class.ae_fields.sort_by { |a| a.priority.to_i }.collect do |fld|
MiqAeValue.find_or_initialize_by(:field_id => fld.id.to_s, :instance_id => @ae_inst.id.to_s)
end
if x_node.split('-').first == "aei"
# for instance_fields view
get_instances_form_vars_for
else
# for class_instances view
get_instances_form_vars_for("cls_")
end
end
# Set record variables to new values
def set_record_vars(miqaeclass)
miqaeclass.name = @edit[:new][:name].strip if @edit[:new][:name].present?
miqaeclass.display_name = @edit[:new][:display_name]
miqaeclass.description = @edit[:new][:description]
miqaeclass.inherits = @edit[:new][:inherits]
ns = x_node.split("-")
if ns.first == "aen" && !miqaeclass.namespace_id
rec = MiqAeNamespace.find(ns[1])
miqaeclass.namespace_id = rec.id.to_s
# miqaeclass.namespace = rec.name
end
end
# Set record variables to new values
def set_method_record_vars(miqaemethod)
miqaemethod.name = @edit[:new][:name].strip if @edit[:new][:name].present?
miqaemethod.display_name = @edit[:new][:display_name]
miqaemethod.scope = @edit[:new][:scope]
miqaemethod.location = @edit[:new][:location]
miqaemethod.language = @edit[:new][:language]
miqaemethod.data = if @edit[:new][:location] == 'expression'
data_for_expression
else
@edit[:new][:data]
end
miqaemethod.class_id = @edit[:ae_class_id]
miqaemethod.embedded_methods = @edit[:new][:embedded_methods] if @edit[:new][:location] == 'inline'
end
def namespace_set_record_vars(miqaens)
miqaens.name = params[:name].strip if params[:name].present?
miqaens.description = params[:description]
miqaens.enabled = params[:enabled] if miqaens.domain?
end
# Set record variables to new values
def set_field_vars(parent = nil)
fields = parent_fields(parent)
highest_priority = fields.count
@edit[:new][:fields].each_with_index do |fld, i|
if fld["id"].nil?
new_field = MiqAeField.new
highest_priority += 1
new_field.priority = highest_priority
if @ae_method
new_field.method_id = @ae_method.id
else
new_field.class_id = @ae_class.id
end
else
new_field = parent.nil? ? MiqAeField.find(fld["id"]) : fields.detect { |f| f.id == fld["id"] }
end
field_attributes.each do |attr|
if attr == "substitute" || @edit[:new][:fields][i][attr]
new_field.send("#{attr}=", @edit[:new][:fields][i][attr])
end
end
if new_field.new_record? || parent.nil?
raise StandardError, new_field.errors.full_messages[0] unless fields.push(new_field)
end
end
reset_field_priority(fields)
end
alias set_input_vars set_field_vars
def parent_fields(parent)
return [] unless parent
parent.class == MiqAeClass ? parent.ae_fields : parent.inputs
end
def reset_field_priority(fields)
# reset priority to be in order 1..3
i = 0
fields.sort_by { |a| [a.priority.to_i] }.each do |fld|
if !@edit[:fields_to_delete].include?(fld.id.to_s) || fld.id.blank?
i += 1
fld.priority = i
end
end
fields
end
# Set record variables to new values
def set_instances_record_vars(miqaeinst)
instance_column_names.each do |attr|
miqaeinst.send("#{attr}=", @edit[:new][:ae_inst][attr].try(:strip))
end
miqaeinst.class_id = @edit[:ae_class_id]
end
# Set record variables to new values
def set_instances_value_vars(vals, ae_instance = nil)
original_values = ae_instance ? ae_instance.ae_values : []
vals.each_with_index do |v, i|
original = original_values.detect { |ov| ov.id == v.id } unless original_values.empty?
if original
v = original
elsif ae_instance
ae_instance.ae_values << v
end
value_column_names.each do |attr|
v.send("#{attr}=", @edit[:new][:ae_values][i][attr]) if @edit[:new][:ae_values][i][attr]
end
end
end
def fields_seq_edit_screen(id)
@edit = {}
@edit[:new] = {}
@edit[:current] = {}
@ae_class = MiqAeClass.find_by(:id => id)
@edit[:rec_id] = @ae_class.try(:id)
@edit[:ae_class_id] = @ae_class.id
@edit[:new][:fields] = @ae_class.ae_fields.to_a.deep_clone
@edit[:new][:fields_list] = @edit[:new][:fields]
.sort_by { |f| f.priority.to_i }
.collect { |f| f.display_name ? "#{f.display_name} (#{f.name})" : "(#{f.name})" }
@edit[:key] = "fields_edit__seq"
@edit[:current] = copy_hash(@edit[:new])
@right_cell_text = _("Edit of Class Schema Sequence '%{name}'") % {:name => @ae_class.name}
session[:edit] = @edit
end
def move_selected_fields_up(available_fields, selected_fields, display_name)
if no_items_selected?(selected_fields)
add_flash(_("No %{name} were selected to move up") % {:name => display_name}, :error)
return false
end
consecutive, first_idx, last_idx = selected_consecutive?(available_fields, selected_fields)
@selected = selected_fields
if consecutive
if first_idx.positive?
available_fields[first_idx..last_idx].reverse_each do |field|
pulled = available_fields.delete(field)
available_fields.insert(first_idx - 1, pulled)
end
end
return true
end
add_flash(_("Select only one or consecutive %{name} to move up") % {:name => display_name}, :error)
false
end
def move_selected_fields_down(available_fields, selected_fields, display_name)
if no_items_selected?(selected_fields)
add_flash(_("No %{name} were selected to move down") % {:name => display_name}, :error)
return false
end
consecutive, first_idx, last_idx = selected_consecutive?(available_fields, selected_fields)
@selected = selected_fields
if consecutive
if last_idx < available_fields.length - 1
insert_idx = last_idx + 1 # Insert before the element after the last one
insert_idx = -1 if last_idx == available_fields.length - 2 # Insert at end if 1 away from end
available_fields[first_idx..last_idx].each do |field|
pulled = available_fields.delete(field)
available_fields.insert(insert_idx, pulled)
end
end
return true
end
add_flash(_("Select only one or consecutive %{name} to move down") % {:name => display_name}, :error)
false
end
def no_items_selected?(field_name)
field_name.blank? || field_name[0] == ""
end
def selected_consecutive?(available_fields, selected_fields)
first_idx = last_idx = 0
available_fields.each_with_index do |nf, idx|
first_idx = idx if nf == selected_fields.first
if nf == selected_fields.last
last_idx = idx
break
end
end
if last_idx - first_idx + 1 > selected_fields.length
[false, first_idx, last_idx]
else
[true, first_idx, last_idx]
end
end
def edit_domain_or_namespace
obj = find_checked_items
obj = [x_node] if obj.nil? && params[:id]
typ = params[:pressed] == "miq_ae_domain_edit" ? MiqAeDomain : MiqAeNamespace
@ae_ns = find_record_with_rbac(typ, obj[0].split('-')[1])
if @ae_ns.domain? && !@ae_ns.editable_properties?
add_flash(_("Read Only Automate Domain \"%{name}\" cannot be edited") %
{:name => get_record_display_name(@ae_ns)},
:error)
else
ns_set_form_vars
@in_a_form = true
@angular_form = true
session[:changed] = @changed = false
end
replace_right_cell
end
def new_ns
assert_privileges("miq_ae_namespace_new")
new_domain_or_namespace(MiqAeNamespace)
end
def new_domain
assert_privileges("miq_ae_domain_new")
new_domain_or_namespace(MiqAeDomain)
end
def new_domain_or_namespace(klass)
parent_id = x_node == "root" ? nil : x_node.split("-").last
@ae_ns = klass.new(:parent_id => parent_id)
ns_set_form_vars
@in_a_form = true
@angular_form = true
replace_right_cell
end
# Set form variables for edit
def ns_set_form_vars
session[:field_data] = session[:edit] = {}
@edit = {
:ae_ns_id => @ae_ns.id,
:typ => @ae_ns.domain? ? "MiqAeDomain" : "MiqAeNamespace",
:key => "aens_edit__#{@ae_ns.id || "new"}",
:rec_id => @ae_ns.id || nil
}
@edit[:new] = {
:ns_name => @ae_ns.name,
:ns_description => @ae_ns.description
}
# set these field for a new domain or when existing record is a domain
@edit[:new][:enabled] = @ae_ns.enabled if @ae_ns.domain?
@edit[:current] = @edit[:new].dup
@right_cell_text = ns_right_cell_text
session[:edit] = @edit
end
def ns_right_cell_text
model = ui_lookup(:model => @edit[:typ])
name_for_msg = if @edit[:rec_id].nil?
_("Adding a new %{model}") % {:model => model}
else
_("Editing %{model} \"%{name}\"") % {:model => model, :name => @ae_ns.name}
end
name_for_msg
end
def ordered_domains_for_priority_edit_screen
User.current_tenant.sequenceable_domains.collect(&:name)
end
def priority_edit_screen
@in_a_form = true
@edit = {
:key => "priority__edit",
:new => {:domain_order => ordered_domains_for_priority_edit_screen}
}
@edit[:current] = copy_hash(@edit[:new])
session[:edit] = @edit
end
def domain_toggle(locked)
assert_privileges("miq_ae_domain_#{locked ? 'lock' : 'unlock'}")
action = locked ? _("Locked") : _("Unlocked")
if params[:id].nil?
add_flash(_("No Automate Domain were selected to be marked as %{action}") % {:action => action}, :error)
javascript_flash
end
domain_toggle_lock(params[:id], locked)
unless flash_errors?
add_flash(_("The selected Automate Domain were marked as %{action}") % {:action => action}, :info, true)
end
replace_right_cell(:replace_trees => [:ae])
end
def domain_lock
domain_toggle(true)
end
def domain_unlock
domain_toggle(false)
end
def domain_toggle_lock(domain_id, lock)
domain = MiqAeDomain.find(domain_id)
lock ? domain.lock_contents! : domain.unlock_contents!
end
def git_refresh
@in_a_form = true
@explorer = true
session[:changed] = true
git_repo = MiqAeDomain.find(params[:id]).git_repository
git_based_domain_import_service.refresh(git_repo.id)
git_repo.reload
@branch_names = git_repo.git_branches.collect(&:name)
@tag_names = git_repo.git_tags.collect(&:name)
@git_repo_id = git_repo.id
@right_cell_text = _("Refreshing branch/tag for Git-based Domain")
presenter = ExplorerPresenter.new(
:active_tree => x_active_tree,
:right_cell_text => @right_cell_text,
:remove_nodes => nil,
:add_nodes => nil
)
update_partial_div = :main_div
update_partial = "git_domain_refresh"
presenter.update(update_partial_div, r[:partial => update_partial])
action_url = "refresh_git_domain"
presenter.show(:paging_div, :form_buttons_div)
presenter.update(:form_buttons_div, r[
:partial => "layouts/x_edit_buttons",
:locals => {
:record_id => git_repo.id,
:action_url => action_url,
:serialize => true,
:no_reset => true
}
])
presenter.show(:toolbar)
render :json => presenter.for_render
end
def git_based_domain_import_service
@git_based_domain_import_service ||= GitBasedDomainImportService.new
end
def get_instance_node_info(node_id)
begin
@record = MiqAeInstance.find(node_id)
rescue ActiveRecord::RecordNotFound
set_root_node
return
end
@ae_class = @record.ae_class
@sb[:active_tab] = "instances"
domain_overrides
set_right_cell_text(x_node, @record)
end
def get_method_node_info(node_id)
begin
@record = @ae_method = MiqAeMethod.find(node_id)
rescue ActiveRecord::RecordNotFound
set_root_node
return
end
@ae_class = @record.ae_class
@sb[:squash_state] = true
@sb[:active_tab] = "methods"
if @record.location == 'expression'
hash = YAML.load(@record.data)
@expression = hash[:expression] ? MiqExpression.new(hash[:expression]).to_human : ""
elsif playbook_style_location?(@record.location)
@playbook_details = fetch_playbook_details(@record)
end
domain_overrides
set_right_cell_text(x_node, @record)
end
def fetch_manager_name(ansible_template_id)
return nil if ansible_template_id.blank?
ManageIQ::Providers::ExternalAutomationManager::ConfigurationScript.find_by(:id => ansible_template_id)&.manager&.name
end
def fetch_playbook_details(record)
options = record.options
details = {
:repository => fetch_name_from_object(ManageIQ::Providers::EmbeddedAnsible::AutomationManager::ConfigurationScriptSource, options[:repository_id]),
:playbook => fetch_name_from_object(ManageIQ::Providers::EmbeddedAnsible::AutomationManager::Playbook, options[:playbook_id]),
:machine_credential => fetch_name_from_object(ManageIQ::Providers::EmbeddedAnsible::AutomationManager::MachineCredential, options[:credential_id]),
:verbosity => options[:verbosity],
:become_enabled => options[:become_enabled] == true ? _("Yes") : _("No"),
:execution_ttl => options[:execution_ttl],
:hosts => options[:hosts],
:log_output => options[:log_output],
:ansible_template_id => options[:ansible_template_id],
:manager_name => fetch_manager_name(options[:ansible_template_id]),
}
details[:network_credential] = fetch_name_from_object(ManageIQ::Providers::EmbeddedAnsible::AutomationManager::NetworkCredential, options[:network_credential_id]) if options[:network_credential_id]
details[:cloud_credential] = fetch_name_from_object(ManageIQ::Providers::EmbeddedAnsible::AutomationManager::CloudCredential, options[:cloud_credential_id]) if options[:cloud_credential_id]
details[:vault_credential] = fetch_name_from_object(ManageIQ::Providers::EmbeddedAnsible::AutomationManager::VaultCredential, options[:vault_credential_id]) if options[:vault_credential_id]
details[:ansible_template] = fetch_name_from_object(ManageIQ::Providers::ExternalAutomationManager::ConfigurationScript, options[:ansible_template_id]) if options[:ansible_template_id]
details
end
def get_class_node_info(node_id)
@sb[:active_tab] = "instances" if !@in_a_form && !params[:button] && !params[:pressed]
begin
@record = @ae_class = MiqAeClass.find(node_id)
rescue ActiveRecord::RecordNotFound
set_root_node
return
end
@combo_xml = build_type_options
# passing fields because that's how many combo boxes we need
@dtype_combo_xml = build_dtype_options
@grid_methods_list_xml = build_details_grid(@record.ae_methods)
domain_overrides
set_right_cell_text(x_node, @record)
end
def domain_overrides
@domain_overrides = {}
typ, = x_node.split('-')
overrides = TreeBuilder.get_model_for_prefix(typ).constantize.get_homonymic_across_domains(current_user, @record.fqname)
overrides.each do |obj|
display_name, id = domain_display_name_using_name(obj, @record.domain.name)
@domain_overrides[display_name] = id
end
end
def title
_("Datastore")
end
def session_key_prefix
"miq_ae_class"
end
def get_session_data
super
@edit = session[:edit]
end
def flash_validation_errors(am_obj)
am_obj.errors.each do |field, msg|
add_flash("#{field.to_s.capitalize} #{msg}", :error)
end
end
def add_active_node_to_open_nodes
return unless @sb.dig('trees', 'ae_tree', 'open_nodes')
@sb['trees']['ae_tree']['open_nodes'].push(@sb['trees']['ae_tree']['active_node']).uniq!
end
menu_section :automate
def process_element_destroy_via_queue(element, klass, name)
return unless element.respond_to?(:destroy)
audit = {:event => "#{klass.name.downcase}_record_delete",
:message => "[#{name}] Record deleted",
:target_id => element.id,
:target_class => klass.base_class.name,
:userid => session[:userid]}
model_name = ui_lookup(:model => klass.name) # Lookup friendly model name in dictionary
record_name = get_record_display_name(element)
begin
git_based_domain_import_service.destroy_domain(element.id)
AuditEvent.success(audit)
add_flash(_("%{model} \"%{name}\": Delete successful") % {:model => model_name, :name => record_name})
rescue StandardError => bang
add_flash(_("%{model} \"%{name}\": Error during delete: %{error_msg}") %
{:model => model_name, :name => record_name, :error_msg => bang.message}, :error)
end
end
def breadcrumbs_options
{
:breadcrumbs => [
{:title => _("Automation")},
{:title => _("Automate")},
{:title => _("Explorer")},
],
}
end
def accord_name
features.find { |f| f.accord_name == x_active_accord.to_s }.try(:title)
end
end
| 36.414506 | 236 | 0.640026 |
edcb1a8de836accad80babbbeaff277e38ce4e9e | 1,544 | require 'rails_helper'
# Validations enforced on projects transitioning from online to an end state.
RSpec.describe Project::BaseValidator, type: :model do
  let(:project_state) { 'draft' }
  let(:project) { create(:project, state: project_state, mode: 'flex') }
  context "when project is going to online to end state" do
    subject { project }
    # The same expectations apply to every state in the online-to-end set.
    Project::ON_ONLINE_TO_END_STATES.each do |state|
      context "#{state} project validations" do
        let(:project_state) { state }
        it { is_expected.to validate_presence_of :about_html }
        it { is_expected.to validate_presence_of :headline }
        it { is_expected.to validate_numericality_of(:online_days).is_less_than_or_equal_to(365).is_greater_than_or_equal_to(1).allow_nil }
      end
      context "#{state} project relation validations" do
        let(:project_state) { state }
        context "when user bank account is not present" do
          before do
            project.user.bank_account = nil
            project.valid?
          end
          # NOTE(review): errors[] returns an array and is never nil, so
          # `not_to be_nil` always passes; asserting non-emptiness is stronger.
          it { expect(project.errors['bank_account']).not_to be_nil }
        end
        context "when user as missing some required fields" do
          before do
            project.user.uploaded_image = nil
            project.user.about_html = nil
            project.user.name = nil
            project.valid?
          end
          [:uploaded_image, :about_html, :name].each do |attr|
            it "should have error user.#{attr.to_s}" do
              expect(project.errors['user.' + attr.to_s]).not_to be_nil
            end
          end
        end
      end
    end
  end
end
| 30.27451 | 139 | 0.621114 |
Subsets and Splits
No saved queries yet
Save your SQL queries to embed, download, and access them later. Queries will appear here once saved.