hexsha
stringlengths 40
40
| size
int64 2
1.01M
| content
stringlengths 2
1.01M
| avg_line_length
float64 1.5
100
| max_line_length
int64 2
1k
| alphanum_fraction
float64 0.25
1
|
---|---|---|---|---|---|
38ac7423934e73913880971d57f3886d8738d0bc | 1,613 | # -*- encoding: utf-8 -*-
# stub: state_machines 0.5.0 ruby lib
Gem::Specification.new do |s|
s.name = "state_machines".freeze
s.version = "0.5.0"
s.required_rubygems_version = Gem::Requirement.new(">= 0".freeze) if s.respond_to? :required_rubygems_version=
s.require_paths = ["lib".freeze]
s.authors = ["Abdelkader Boudih".freeze, "Aaron Pfeifer".freeze]
s.date = "2017-06-20"
s.description = "Adds support for creating state machines for attributes on any Ruby class".freeze
s.email = ["[email protected]".freeze, "[email protected]".freeze]
s.homepage = "https://github.com/state-machines/state_machines".freeze
s.licenses = ["MIT".freeze]
s.required_ruby_version = Gem::Requirement.new(">= 2.0.0".freeze)
s.rubygems_version = "2.7.10".freeze
s.summary = "State machines for attributes".freeze
s.installed_by_version = "2.7.10" if s.respond_to? :installed_by_version
if s.respond_to? :specification_version then
s.specification_version = 4
if Gem::Version.new(Gem::VERSION) >= Gem::Version.new('1.2.0') then
s.add_development_dependency(%q<bundler>.freeze, [">= 1.7.6"])
s.add_development_dependency(%q<rake>.freeze, [">= 0"])
s.add_development_dependency(%q<minitest>.freeze, [">= 5.4"])
else
s.add_dependency(%q<bundler>.freeze, [">= 1.7.6"])
s.add_dependency(%q<rake>.freeze, [">= 0"])
s.add_dependency(%q<minitest>.freeze, [">= 5.4"])
end
else
s.add_dependency(%q<bundler>.freeze, [">= 1.7.6"])
s.add_dependency(%q<rake>.freeze, [">= 0"])
s.add_dependency(%q<minitest>.freeze, [">= 5.4"])
end
end
| 40.325 | 112 | 0.67142 |
3300757862627c1f7c96f226bd7d3782e2c548dc | 304 | class Password < ActiveRecord::Base
include CryptKey
acts_as_paranoid
belongs_to :user
before_create :create_reset_key
private
def create_reset_key
while reset_key.blank? || Password.with_deleted.find_by_reset_key(reset_key)
self.reset_key = crypt_key[0..15]
end
end
end
| 16.888889 | 80 | 0.75 |
21961dee40fa0bbe0a1b94f463f4a90ae30ef724 | 2,857 | # Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
module Azure::DataShare::Mgmt::V2018_11_01_preview
module Models
#
# List response for get ShareSubscription.
#
class ProviderShareSubscriptionList
include MsRestAzure
include MsRest::JSONable
# @return [String] The Url of next result page.
attr_accessor :next_link
# @return [Array<ProviderShareSubscription>] Collection of items of type
# DataTransferObjects.
attr_accessor :value
# return [Proc] with next page method call.
attr_accessor :next_method
#
# Gets the rest of the items for the request, enabling auto-pagination.
#
# @return [Array<ProviderShareSubscription>] operation results.
#
def get_all_items
items = @value
page = self
while page.next_link != nil && !page.next_link.strip.empty? do
page = page.get_next_page
items.concat(page.value)
end
items
end
#
# Gets the next page of results.
#
# @return [ProviderShareSubscriptionList] with next page content.
#
def get_next_page
response = @next_method.call(@next_link).value! unless @next_method.nil?
unless response.nil?
@next_link = response.body.next_link
@value = response.body.value
self
end
end
#
# Mapper for ProviderShareSubscriptionList class as Ruby Hash.
# This will be used for serialization/deserialization.
#
def self.mapper()
{
client_side_validation: true,
required: false,
serialized_name: 'ProviderShareSubscriptionList',
type: {
name: 'Composite',
class_name: 'ProviderShareSubscriptionList',
model_properties: {
next_link: {
client_side_validation: true,
required: false,
serialized_name: 'nextLink',
type: {
name: 'String'
}
},
value: {
client_side_validation: true,
required: true,
serialized_name: 'value',
type: {
name: 'Sequence',
element: {
client_side_validation: true,
required: false,
serialized_name: 'ProviderShareSubscriptionElementType',
type: {
name: 'Composite',
class_name: 'ProviderShareSubscription'
}
}
}
}
}
}
}
end
end
end
end
| 28.858586 | 80 | 0.540777 |
21dd29e95c797749723f34f04180807aae42f81b | 2,083 | require "language/node"
class AtomistCli < Formula
desc "Unified command-line tool for interacting with Atomist services"
homepage "https://github.com/atomist/cli#readme"
url "https://registry.npmjs.org/@atomist/cli/-/cli-1.8.0.tgz"
sha256 "64bcc7484fa2f1b7172984c278ae928450149fb02b750f79454b1a6683d17f62"
license "Apache-2.0"
bottle do
rebuild 1
sha256 arm64_big_sur: "aa7a1df34f9d6914158696305ef167f422ac4571137e01483e00bc4f637c251c"
sha256 big_sur: "6dd88e8522cd4cf5d53b17f796aef1eca9cbe1c602c00c892d2f30eb73db0d39"
sha256 catalina: "c622ee3ba1742b49887892d30cead992cb34f4f28e68626b03b20a73bd88ba9d"
sha256 mojave: "d5f0927cbfcf78438a0affe17488467727659c5caf9de3a65f9ed565bd23529c"
sha256 cellar: :any_skip_relocation, x86_64_linux: "fd2e81f85c45f1f3ecdd194acddb30013e3420703fc244ed30711146ea4f1fcf" # linuxbrew-core
end
depends_on "node"
def install
system "npm", "install", *Language::Node.std_npm_install_args(libexec)
bin.install_symlink Dir["#{libexec}/bin/*"]
bash_completion.install "#{libexec}/lib/node_modules/@atomist/cli/assets/bash_completion/atomist"
end
test do
assert_predicate bin/"atomist", :exist?
assert_predicate bin/"atomist", :executable?
assert_predicate bin/"@atomist", :exist?
assert_predicate bin/"@atomist", :executable?
run_output = shell_output("#{bin}/atomist 2>&1", 1)
assert_match "Not enough non-option arguments", run_output
assert_match "Specify --help for available options", run_output
version_output = shell_output("#{bin}/atomist --version")
assert_match "@atomist/cli", version_output
assert_match "@atomist/sdm ", version_output
assert_match "@atomist/sdm-core", version_output
assert_match "@atomist/sdm-local", version_output
skill_output = shell_output("#{bin}/atomist show skills")
assert_match(/\d+ commands are available from \d+ connected SDMs/, skill_output)
end
end
| 44.319149 | 139 | 0.719635 |
1d476cc31f2b1fe0ac95b0f85f9fdd62ca9d436c | 357 | # frozen_string_literal: true
class RenameDocumentJoinTables < ActiveRecord::Migration
def change
rename_table :documents_alignments, :document_alignments
rename_table :documents_keywords, :document_keywords
rename_table :documents_languages, :document_languages
rename_table :documents_resource_types, :document_resource_types
end
end
| 32.454545 | 68 | 0.829132 |
18126f340cc3413e2fc16400500919a99f26f355 | 9,772 | require 'puppet/face'
require 'puppet/settings/ini_file'
Puppet::Face.define(:config, '0.0.1') do
extend Puppet::Util::Colors
copyright "Puppet Inc.", 2011
license _("Apache 2 license; see COPYING")
summary _("Interact with Puppet's settings.")
description "This subcommand can inspect and modify settings from Puppet's
'puppet.conf' configuration file. For documentation about individual settings,
see https://puppet.com/docs/puppet/latest/configuration.html."
DEFAULT_SECTION_MARKER = Object.new
DEFAULT_SECTION = "main"
option "--section " + _("SECTION_NAME") do
default_to { DEFAULT_SECTION_MARKER } #Sentinel object for default detection during commands
summary _("The section of the configuration file to interact with.")
description <<-EOT
The section of the puppet.conf configuration file to interact with.
The three most commonly used sections are 'main', 'master', and 'agent'.
'Main' is the default, and is used by all Puppet applications. Other
sections can override 'main' values for specific applications --- the
'master' section affects Puppet Server, and the 'agent'
section affects puppet agent.
Less commonly used is the 'user' section, which affects puppet apply. Any
other section will be treated as the name of a legacy environment
(a deprecated feature), and can only include the 'manifest' and
'modulepath' settings.
EOT
end
action(:print) do
summary _("Examine Puppet's current settings.")
arguments _("(all | <setting> [<setting> ...]")
description <<-'EOT'
Prints the value of a single setting or a list of settings.
This action is a replacement interface to the information available with
`puppet <subcommand> --configprint`.
EOT
notes <<-'EOT'
By default, this action reads the general configuration in the 'main'
section. Use the '--section' and '--environment' flags to examine other
configuration domains.
EOT
examples <<-'EOT'
Get puppet's runfile directory:
$ puppet config print rundir
Get a list of important directories from the master's config:
$ puppet config print all --section master | grep -E "(path|dir)"
EOT
when_invoked do |*args|
options = args.pop
@default_section = false
if options[:section] == DEFAULT_SECTION_MARKER
options[:section] = DEFAULT_SECTION
@default_section = true
end
render_all_settings = args.empty? || args == ['all']
args = Puppet.settings.to_a.collect(&:first) if render_all_settings
values_from_the_selected_section =
Puppet.settings.values(nil, options[:section].to_sym)
loader_settings = {
:environmentpath => values_from_the_selected_section.interpolate(:environmentpath),
:basemodulepath => values_from_the_selected_section.interpolate(:basemodulepath),
}
to_be_rendered = nil
Puppet.override(Puppet.base_context(loader_settings),
_("New environment loaders generated from the requested section.")) do
# And now we can lookup values that include those from environments configured from
# the requested section
values = Puppet.settings.values(Puppet[:environment].to_sym, options[:section].to_sym)
if Puppet::Util::Log.sendlevel?(:info)
warn_default_section(options[:section]) if @default_section
report_section_and_environment(options[:section], Puppet.settings[:environment])
end
to_be_rendered = {}
args.sort.each do |setting_name|
to_be_rendered[setting_name] = values.print(setting_name.to_sym)
end
end
# convert symbols to strings before formatting output
if render_all_settings
to_be_rendered = stringifyhash(to_be_rendered)
end
to_be_rendered
end
when_rendering :console do |to_be_rendered|
output = ''
if to_be_rendered.keys.length > 1
to_be_rendered.keys.sort.each do |setting|
output << "#{setting} = #{to_be_rendered[setting]}\n"
end
else
output << "#{to_be_rendered.to_a[0].last}\n"
end
output
end
end
def stringifyhash(hash)
newhash = {}
hash.each do |key, val|
key = key.to_s
if val.is_a? Hash
newhash[key] = stringifyhash(val)
elsif val.is_a? Symbol
newhash[key] = val.to_s
else
newhash[key] = val
end
end
newhash
end
def warn_default_section(section_name)
messages = []
messages << _("No section specified; defaulting to '%{section_name}'.") %
{ section_name: section_name }
#TRANSLATORS '--section' is a command line option and should not be translated
messages << _("Set the config section by using the `--section` flag.")
#TRANSLATORS `puppet config --section user print foo` is a command line example and should not be translated
messages << _("For example, `puppet config --section user print foo`.")
messages << _("For more information, see https://puppet.com/docs/puppet/latest/configuration.html")
Puppet.warning(messages.join("\n"))
end
def report_section_and_environment(section_name, environment_name)
$stderr.puts colorize(:hyellow,
_("Resolving settings from section '%{section_name}' in environment '%{environment_name}'") %
{ section_name: section_name, environment_name: environment_name })
end
action(:set) do
summary _("Set Puppet's settings.")
arguments _("[setting_name] [setting_value]")
description <<-'EOT'
Updates values in the `puppet.conf` configuration file.
EOT
notes <<-'EOT'
By default, this action manipulates the configuration in the
'main' section. Use the '--section' flag to manipulate other
configuration domains.
EOT
examples <<-'EOT'
Set puppet's runfile directory:
$ puppet config set rundir /var/run/puppetlabs
Set the vardir for only the agent:
$ puppet config set vardir /opt/puppetlabs/puppet/cache --section agent
EOT
when_invoked do |name, value, options|
@default_section = false
if options[:section] == DEFAULT_SECTION_MARKER
options[:section] = DEFAULT_SECTION
@default_section = true
end
if name == 'environment' && options[:section] == 'main'
Puppet.warning _(<<-EOM).chomp
The environment should be set in either the `[user]`, `[agent]`, or `[master]`
section. Variables set in the `[agent]` section are used when running
`puppet agent`. Variables set in the `[user]` section are used when running
various other puppet subcommands, like `puppet apply` and `puppet module`; these
require the defined environment directory to exist locally. Set the config
section by using the `--section` flag. For example,
`puppet config --section user set environment foo`. For more information, see
https://puppet.com/docs/puppet/latest/configuration.html#environment
EOM
end
if Puppet::Util::Log.sendlevel?(:info)
report_section_and_environment(options[:section], Puppet.settings[:environment])
end
path = Puppet::FileSystem.pathname(Puppet.settings.which_configuration_file)
Puppet::FileSystem.touch(path)
Puppet::FileSystem.open(path, nil, 'r+:UTF-8') do |file|
Puppet::Settings::IniFile.update(file) do |config|
config.set(options[:section], name, value)
end
end
nil
end
end
action(:delete) do
summary _("Delete a Puppet setting.")
arguments _("(<setting>")
#TRANSLATORS 'main' is a specific section name and should not be translated
description "Deletes a setting from the specified section. (The default is the section 'main')."
notes <<-'EOT'
By default, this action deletes the configuration setting from the 'main'
configuration domain. Use the '--section' flags to delete settings from other
configuration domains.
EOT
examples <<-'EOT'
Delete the setting 'setting_name' from the 'main' configuration domain:
$ puppet config delete setting_name
Delete the setting 'setting_name' from the 'master' configuration domain:
$ puppet config delete setting_name --section master
EOT
when_invoked do |name, options|
@default_section = false
if options[:section] == DEFAULT_SECTION_MARKER
options[:section] = DEFAULT_SECTION
@default_section = true
end
path = Puppet::FileSystem.pathname(Puppet.settings.which_configuration_file)
if Puppet::FileSystem.exist?(path)
Puppet::FileSystem.open(path, nil, 'r+:UTF-8') do |file|
Puppet::Settings::IniFile.update(file) do |config|
setting_string = config.delete(options[:section], name)
if setting_string
if Puppet::Util::Log.sendlevel?(:info)
report_section_and_environment(options[:section], Puppet.settings[:environment])
end
puts(_("Deleted setting from '%{section_name}': '%{setting_string}'") %
{ section_name: options[:section], name: name, setting_string: setting_string.strip })
else
Puppet.warning(_("No setting found in configuration file for section '%{section_name}' setting name '%{name}'") %
{ section_name: options[:section], name: name })
end
end
end
else
#TRANSLATORS the 'puppet.conf' is a specific file and should not be translated
Puppet.warning(_("The puppet.conf file does not exist %{puppet_conf}") % { puppet_conf: path })
end
nil
end
end
end
| 36.736842 | 127 | 0.670078 |
e23039d87b3a8a6990eed68404a9ee732e828355 | 7,075 | class V8 < Formula
desc "Google's JavaScript engine"
homepage "https://github.com/v8/v8/wiki"
# Track V8 version from Chrome stable: https://omahaproxy.appspot.com
# revert back to GitHub mirror tar.gz archives once it's synced again
url "https://chromium.googlesource.com/v8/v8.git",
tag: "9.5.172.25",
revision: "b5fa92428c9d4516ebdc72643ea980d8bde8f987"
license "BSD-3-Clause"
livecheck do
url "https://omahaproxy.appspot.com/all.json?os=mac&channel=stable"
regex(/"v8_version": "v?(\d+(?:\.\d+)+)"/i)
end
bottle do
sha256 cellar: :any, arm64_big_sur: "35837e31a667ed1650e8359ea5987adaa78fedf90ee2a091b21d99b78532aa05"
sha256 cellar: :any, big_sur: "a80774979953516b1025ef316d3639494ecde0e502d2ad31e599f86ba12ed7b4"
sha256 cellar: :any, catalina: "f3222d7783da193604cd81256b69d9efe1cf1625ac22eaeeb754149cd1364f5e"
sha256 cellar: :any_skip_relocation, x86_64_linux: "92e50af4569f2a489feefc1c3f12f286c9b9e0a7e9a496a36ef7c7db4decfde0"
end
depends_on "ninja" => :build
depends_on "[email protected]" => :build
on_macos do
depends_on "llvm" => :build
depends_on xcode: ["10.0", :build] # required by v8
end
on_linux do
depends_on "pkg-config" => :build
depends_on "gcc"
depends_on "glib"
end
fails_with gcc: "5"
# Look up the correct resource revisions in the DEP file of the specific releases tag
# e.g. for CIPD dependency gn: https://github.com/v8/v8/blob/9.4.146.16/DEPS#L52
resource "gn" do
url "https://gn.googlesource.com/gn.git",
revision: "69ec4fca1fa69ddadae13f9e6b7507efa0675263"
end
# e.g.: https://github.com/v8/v8/blob/9.4.146.16/DEPS#L93 for the revision of trace event for v8 9.2.230.29
resource "v8/base/trace_event/common" do
url "https://chromium.googlesource.com/chromium/src/base/trace_event/common.git",
revision: "715537d6007ca71837f48bcb04fc3d482aed2507"
end
resource "v8/build" do
url "https://chromium.googlesource.com/chromium/src/build.git",
revision: "17d097b0ffdc297f04afb54e9e3abff3f1203f06"
end
resource "v8/third_party/googletest/src" do
url "https://chromium.googlesource.com/external/github.com/google/googletest.git",
revision: "955c7f837efad184ec63e771c42542d37545eaef"
end
resource "v8/third_party/icu" do
url "https://chromium.googlesource.com/chromium/deps/icu.git",
revision: "ece15d049f2d360721716089372e3749fb89e0f4"
end
resource "v8/third_party/jinja2" do
url "https://chromium.googlesource.com/chromium/src/third_party/jinja2.git",
revision: "6db8da1615a13fdfab925688bc4bf2eb394a73af"
end
resource "v8/third_party/markupsafe" do
url "https://chromium.googlesource.com/chromium/src/third_party/markupsafe.git",
revision: "1b882ef6372b58bfd55a3285f37ed801be9137cd"
end
resource "v8/third_party/zlib" do
url "https://chromium.googlesource.com/chromium/src/third_party/zlib.git",
revision: "77c132322fe81a1f5518b326e18c99ebd3281627"
end
def install
(buildpath/"build").install resource("v8/build")
(buildpath/"third_party/jinja2").install resource("v8/third_party/jinja2")
(buildpath/"third_party/markupsafe").install resource("v8/third_party/markupsafe")
(buildpath/"third_party/googletest/src").install resource("v8/third_party/googletest/src")
(buildpath/"base/trace_event/common").install resource("v8/base/trace_event/common")
(buildpath/"third_party/icu").install resource("v8/third_party/icu")
(buildpath/"third_party/zlib").install resource("v8/third_party/zlib")
# Build gn from source and add it to the PATH
(buildpath/"gn").install resource("gn")
cd "gn" do
system "python3", "build/gen.py"
system "ninja", "-C", "out/", "gn"
end
ENV.prepend_path "PATH", buildpath/"gn/out"
# create gclient_args.gni
(buildpath/"build/config/gclient_args.gni").write <<~EOS
declare_args() {
checkout_google_benchmark = false
}
EOS
# setup gn args
gn_args = {
is_debug: false,
is_component_build: true,
v8_use_external_startup_data: false,
v8_enable_i18n_support: true, # enables i18n support with icu
clang_base_path: "\"#{Formula["llvm"].opt_prefix}\"", # uses Homebrew clang instead of Google clang
clang_use_chrome_plugins: false, # disable the usage of Google's custom clang plugins
use_custom_libcxx: false, # uses system libc++ instead of Google's custom one
treat_warnings_as_errors: false, # ignore not yet supported clang argument warnings
}
if OS.linux?
gn_args[:is_clang] = false # use GCC on Linux
gn_args[:use_sysroot] = false # don't use sysroot
gn_args[:custom_toolchain] = "\"//build/toolchain/linux/unbundle:default\"" # uses system toolchain
gn_args[:host_toolchain] = "\"//build/toolchain/linux/unbundle:default\"" # to respect passed LDFLAGS
ENV["AR"] = DevelopmentTools.locate("ar")
ENV["NM"] = DevelopmentTools.locate("nm")
gn_args[:use_rbe] = false
end
# use clang from homebrew llvm formula, because the system clang is unreliable
ENV.remove "HOMEBREW_LIBRARY_PATHS", Formula["llvm"].opt_lib # but link against system libc++
# Make sure private libraries can be found from lib
ENV.prepend "LDFLAGS", "-Wl,-rpath,#{libexec}"
# Transform to args string
gn_args_string = gn_args.map { |k, v| "#{k}=#{v}" }.join(" ")
# Build with gn + ninja
system "gn", "gen", "--args=#{gn_args_string}", "out.gn"
system "ninja", "-j", ENV.make_jobs, "-C", "out.gn", "-v", "d8"
# Install libraries and headers into libexec so d8 can find them, and into standard directories
# so other packages can find them and they are linked into HOMEBREW_PREFIX
(libexec/"include").install Dir["include/*"]
include.install_symlink Dir[libexec/"include/*"]
libexec.install Dir["out.gn/d8", "out.gn/icudtl.dat"]
bin.write_exec_script libexec/"d8"
libexec.install Dir["out.gn/#{shared_library("*")}"]
lib.install_symlink Dir[libexec/shared_library("libv8*")]
rm Dir[lib/"*.TOC"] if OS.linux? # Remove symlinks to .so.TOC text files
end
test do
assert_equal "Hello World!", shell_output("#{bin}/d8 -e 'print(\"Hello World!\");'").chomp
t = "#{bin}/d8 -e 'print(new Intl.DateTimeFormat(\"en-US\").format(new Date(\"2012-12-20T03:00:00\")));'"
assert_match %r{12/\d{2}/2012}, shell_output(t).chomp
(testpath/"test.cpp").write <<~EOS
#include <libplatform/libplatform.h>
#include <v8.h>
int main(){
static std::unique_ptr<v8::Platform> platform = v8::platform::NewDefaultPlatform();
v8::V8::InitializePlatform(platform.get());
v8::V8::Initialize();
return 0;
}
EOS
# link against installed libc++
system ENV.cxx, "-std=c++14", "test.cpp",
"-I#{include}",
"-L#{lib}", "-lv8", "-lv8_libplatform"
end
end
| 40.428571 | 122 | 0.682968 |
87ce930a2a0af28129d12271ed1f7eccca5a550c | 98 | module Rails3JQueryAutocomplete
module Rails
class Engine < ::Rails::Engine ; end
end
end
| 16.333333 | 40 | 0.734694 |
18379b8419249bd2a6307254911e02b5510607b3 | 3,634 | # -*- encoding: utf-8 -*-
require 'benchmark'
require 'json'
require 'thor'
require 'flacky'
require 'flacky/flac_metadata_importer'
require 'flacky/flac_track_zero_pad_stripper'
require 'flacky/metadata_generator'
require 'flacky/mp3_convertor'
module Flacky
class CLI < Thor
include Thor::Actions
desc "generate_json <root_path>", "Generate and populate metadata as JSON"
def generate_json(root_dir = ENV['PWD'])
start_dir = File.join(File.expand_path(root_dir), '**/*.flac')
Dir.glob(start_dir).sort.map { |f| File.dirname(f) }.uniq.each do |dir|
mdf = File.join(dir, "metadata.json")
say("Processing <#{dir}>", :cyan)
data = Flacky::MetadataGenerator.new(mdf).combined_data
IO.write(mdf, JSON.pretty_generate(data))
end
end
desc "import_json [file ...]|[**/*.flac ...]", "Import metadata JSON into Flac files"
def import_json(*args)
args.each { |glob| import_metadata_for_files(glob) }
end
desc "missing_urls <root_path>", "List all metadata files with missing URLs"
method_option :print0, :aliases => "-0", :type => :boolean
def missing_urls(root_dir = ENV['PWD'])
start_dir = File.join(File.expand_path(root_dir), '**/metadata.json')
files = []
Dir.glob(start_dir).sort.each do |mdf|
attr = JSON.parse(IO.read(mdf))["allmusic_url"]
files << mdf if attr.nil? || attr.empty?
end
if options[:print0]
print files.join("\x0").concat("\x0")
else
puts files.join("\n") unless files.empty?
end
end
desc "strip_pad [file ...]|[**/*.flac ...]", "Strip zero-padded track numbers in Flac files"
def strip_pad(*args)
args.each do |glob|
Dir.glob(glob).sort.each do |file|
say("Processing <#{file}>", :cyan)
Flacky::FlacTrackZeroPadStripper.new(file).strip!
end
end
end
desc "to_mp3 [file ...]|[**/*.flac ...] [options]", "Convert Flac files to MP3 files"
method_option :destination, :aliases => "-d",
:desc => "Sets optional destination directory"
method_option :'lame-opts', :aliases => "-l",
:default => "--vbr-new --verbose -V 0 -b 320",
:desc => "Set the lame encoding arguments"
def to_mp3(*args)
%w{flac lame}.each do |cmd|
abort "Command #{cmd} must be on your PATH" unless %x{which #{cmd}}
end
mp3izer = Flacky::Mp3Convertor.new(
:lame_opts => options[:'lame-opts'],
:dest_root => options[:destination]
)
args.each { |glob| convert_files(glob, mp3izer) }
end
desc "version", "Print Flacky's version information"
def version
say "Flacky version #{Flacky::VERSION}"
end
map %w(-v --version) => :version
private
def convert_files(glob, mp3izer)
Dir.glob(glob).sort.each do |file|
next unless file =~ /\.flac$/
say("Processing #{file}...", :cyan)
response = mp3izer.convert_file!(file)
say("Created #{response.mp3_filename} #{duration(response.elapsed)}",
:yellow)
end
end
def import_metadata_for_files(glob)
Dir.glob(glob).sort.each do |file|
next unless file =~ /\.flac$/
say("Processing #{file}...", :cyan)
response = Flacky::FlacMetadataImporter.new(file).import!
say("Imported #{response.metadata_filename} into #{file} " +
"#{duration(response.elapsed)}", :yellow)
end
end
def duration(total)
minutes = (total / 60).to_i
seconds = (total - (minutes * 60))
"(%dm%.2fs)" % [minutes, seconds]
end
end
end
| 30.537815 | 96 | 0.608696 |
b9abf33e2771453a44ea84ebf5b91c00ef0de2df | 869 | # frozen_string_literal: true
# Even Fibonacci numbers
# https://projecteuler.net/problem=2
# Store history in each number to calculate the next, and
# generate a lazy stream of numbers to pick from
class FibonacciNumber
attr_reader :value
def initialize(previous_value, value)
@previous_value = previous_value
@value = value
end
def next
FibonacciNumber.new(
@value,
@previous_value + @value
)
end
def to_s
"FibonacciNumber(#{@value})"
end
SEED = new(0, 1)
def self.sequence
enumerator = Enumerator.new do |enum|
current = SEED
loop do
enum.yield current
current = current.next
end
end
enumerator
.lazy
.map(&:value)
end
def self.solve(limit)
sequence
.take_while { |n| n <= limit }
.select(&:even?)
.reduce(0, &:+)
end
end
| 17.734694 | 57 | 0.629459 |
386f910a77421acd3e0a1e27cacbce3f6d2ac1a4 | 153 | edition = Publication.unscoped.find(489_925)
edition.update!(state: "draft")
edition.document.update!(slug: edition.document.slug.gsub(/^deleted-/, ""))
| 38.25 | 75 | 0.751634 |
798f16316e710fe8f9246c71599e061f6010b650 | 116 | RSpec.describe Interfacer do
it 'has a version number' do
expect(Interfacer::VERSION).not_to be nil
end
end
| 19.333333 | 45 | 0.741379 |
1c37be712e9edb1d42455331c8ece1524c2fe3d6 | 955 | class TokyoMetro::App::Renderer::PassengerSurvey::Table::Year::Row < TokyoMetro::App::Renderer::PassengerSurvey::Table::MetaClass::Row
def initialize(
request ,
passenger_survey_info , n , max_passenger_journey_in_graph , make_graph ,
passenger_survey_infos_all_in_the_same_year
)
super( request , passenger_survey_info , n , max_passenger_journey_in_graph , make_graph )
set_order( passenger_survey_infos_all_in_the_same_year )
end
def render
h.render inline: <<-HAML , type: :haml , locals: h_locals
%tr{ class: [ :passenger_survey_table_row , cycle( :odd_row , :even_row ) ] , "data-href" => passenger_survey_info.decorate.station_page_name }
- passenger_survey_info_decorated = passenger_survey_info.decorate
= this.render_order
= passenger_survey_info_decorated.render_station_name_in_table
= passenger_survey_info_decorated.render_passenger_journeys
= this.render_svg_domain_in_table
HAML
end
end
| 38.2 | 143 | 0.781152 |
ed56b5cf86898a2d2ad16090bea62f3e4a2bb93a | 400 | cask "solvespace" do
version "2.3"
sha256 "2e93301bbfef9253b5ef614862d891e2ab84e18c6a1e4e36225c5d47cacabc2d"
url "https://github.com/solvespace/solvespace/releases/download/v#{version}/solvespace.dmg",
verified: "github.com/"
appcast "https://github.com/solvespace/solvespace/releases.atom"
name "SolveSpace"
homepage "https://solvespace.com/index.pl/"
app "solvespace.app"
end
| 30.769231 | 94 | 0.7625 |
bf7f2e4b1ceac3d9c21f47174a19a9f98e868cc1 | 2,583 | class Remarshal < Formula
include Language::Python::Virtualenv
desc "Convert between TOML, YAML and JSON"
homepage "https://github.com/dbohdan/remarshal"
url "https://github.com/dbohdan/remarshal/archive/v0.8.0.tar.gz"
sha256 "ab2ab978aaf20e97719680f8f242ea3407090b562d747205486a02cdbf14d17f"
revision 1
head "https://github.com/dbohdan/remarshal.git"
bottle do
cellar :any_skip_relocation
rebuild 1
sha256 "b712854f25a498770ff04dab6f675279763ee0b7b1d354407135e80368d8e418" => :mojave
sha256 "44582cb5294bd6f84f5488b9ed3872656b732c81504b6063b104d1244614ab08" => :high_sierra
sha256 "f5006481444b6658f07ecbb7106a99291671690ffe45e6d54ad40372a4c92c20" => :sierra
end
depends_on "python"
resource "PyYAML" do
url "https://files.pythonhosted.org/packages/9e/a3/1d13970c3f36777c583f136c136f804d70f500168edc1edea6daa7200769/PyYAML-3.13.tar.gz"
sha256 "3ef3092145e9b70e3ddd2c7ad59bdd0252a94dfe3949721633e41344de00a6bf"
end
resource "pytoml" do
url "https://files.pythonhosted.org/packages/6d/2a/c5a0eb781cff59df8613a531f07f9d82bb47ea595aa91c6f114f1621a94a/pytoml-0.1.14.tar.gz"
sha256 "aff69147d436c3ba8c7f3bc1b3f4aa3d7e47d305a495f2631872e6429694aabf"
end
resource "python-dateutil" do
url "https://files.pythonhosted.org/packages/c5/39/4da7c2dbc4f023fba5fb2325febcadf0d0ce0efdc8bd12083a0f65d20653/python-dateutil-2.7.2.tar.gz"
sha256 "9d8074be4c993fbe4947878ce593052f71dac82932a677d49194d8ce9778002e"
end
resource "six" do
url "https://files.pythonhosted.org/packages/16/d8/bc6316cf98419719bd59c91742194c111b6f2e85abac88e496adefaf7afe/six-1.11.0.tar.gz"
sha256 "70e8a77beed4562e7f14fe23a786b54f6296e34344c23bc42f07b15018ff98e9"
end
def install
virtualenv_install_with_resources
["toml", "yaml", "json"].permutation(2).each do |informat, outformat|
bin.install_symlink "remarshal" => "#{informat}2#{outformat}"
end
end
test do
json = <<~EOS.chomp
{"foo.bar":"baz","qux":1}
EOS
yaml = <<~EOS.chomp
foo.bar: baz
qux: 1
EOS
toml = <<~EOS.chomp
"foo.bar" = "baz"
qux = 1
EOS
assert_equal yaml, pipe_output("#{bin}/remarshal -if=json -of=yaml", json)
assert_equal yaml, pipe_output("#{bin}/json2yaml", json)
assert_equal toml, pipe_output("#{bin}/remarshal -if=yaml -of=toml", yaml)
assert_equal toml, pipe_output("#{bin}/yaml2toml", yaml)
assert_equal json, pipe_output("#{bin}/remarshal -if=toml -of=json", toml).chomp
assert_equal json, pipe_output("#{bin}/toml2json", toml).chomp
end
end
| 36.380282 | 145 | 0.753388 |
f8b87f0f5d199ea0bf93955761e76d0b0cf970f9 | 4,064 | require(File.expand_path('report/cell', File.dirname(__FILE__)))
require(File.expand_path('report/row', File.dirname(__FILE__)))
# require_relative './report/cell'
# require_relative './report/row'
module XeroGateway
  # Represents a report returned by the Xero Reports API, parsed from the
  # XML response. Body rows are flattened out of the report's Section rows
  # into Row objects (see .from_xml).
  class Report
    include Money
    include Dates

    attr_reader :errors
    attr_accessor :report_id, :report_name, :report_type, :report_titles, :report_date, :updated_at,
                  :body, :column_names

    # Rows are exposed both as #rows and #body.
    alias :rows :body

    # params may contain any of the writable attributes above; each pair is
    # assigned through its writer method.
    def initialize(params={})
      @errors ||= []
      @report_titles ||= []
      @body ||= []

      params.each do |k,v|
        self.send("#{k}=", v)
      end
    end

    class << self
      # Builds a Report from a REXML element wrapping a <Report> node.
      def from_xml(report_element)
        report = Report.new
        report_element.children.each do | element |
          case element.name
          when 'ReportID' then report.report_id = element.text
          when 'ReportName' then report.report_name = element.text
          when 'ReportType' then report.report_type = element.text
          when 'ReportTitles'
            each_title(element) do |title|
              report.report_titles << title
            end
          when 'ReportDate' then report.report_date = Date.parse(element.text)
          when 'UpdatedDateUTC' then report.updated_at = parse_date_time_utc(element.text)
          when 'Rows'
            report.column_names ||= find_body_column_names(element)
            each_row_content(element) do |row|
              report.body << row
            end
          end
        end
        report
      end

      private

      # Yields a Row for every data row found inside the report's Section
      # rows, tagged with the section's title (may be nil when a section
      # has no <Title> element).
      def each_row_content(xml_element, &block)
        column_names = find_body_column_names(xml_element).values
        report_sections = REXML::XPath.each(xml_element, "//RowType[text()='Section']/parent::Row")
        report_sections.each do |section_row|
          section_name = section_row.get_elements("Title").first.try(:text)
          section_row.elements.each("Rows/Row") do |xpath_cells|
            values = find_body_cell_values(xpath_cells)
            yield Row.new(column_names, values, section_name)
          end
        end
      end

      # Yields each stripped <ReportTitle> text in document order.
      def each_title(xml_element, &block)
        xpath_titles = REXML::XPath.first(xml_element, "//ReportTitles")
        xpath_titles.elements.each("//ReportTitle") do |xpath_title|
          title = xpath_title.text.strip
          yield title if block_given?
        end
      end

      # Maps the cells of one row to Cell objects; cells without a <Value>
      # child become nil so column positions stay aligned.
      def find_body_cell_values(xml_cells)
        values = []
        xml_cells.elements.each("Cells/Cell") do |xml_cell|
          if value = xml_cell.children.first # finds <Value>...</Value>
            values << Cell.new(value.text.try(:strip), collect_attributes(xml_cell))
            next
          end
          values << nil
        end
        values
      end

      # Collects "<Attribute>" elements into a hash
      def collect_attributes(xml_cell)
        Array.wrap(xml_cell.elements["Attributes/Attribute"]).inject({}) do |hash, xml_attribute|
          if (key = xml_attribute.elements["Id"].try(:text)) &&
             (value = xml_attribute.elements["Value"].try(:text))
            hash[key] = value
          end
          hash
        end.symbolize_keys
      end

      # returns something like { column_1: "Amount", column_2: "Description", ... }
      # built from the report's Header row; unnamed columns map to nil.
      def find_body_column_names(body)
        header = REXML::XPath.first(body, "//RowType[text()='Header']")
        names_map = {}
        column_count = 0
        header.parent.elements.each("Cells/Cell") do |header_cell|
          column_count += 1
          column_key = "column_#{column_count}".to_sym
          column_name = nil
          name_value = header_cell.children.first
          column_name = name_value.text.strip unless name_value.blank? # finds <Value>...</Value>
          names_map[column_key] = column_name
        end
        names_map
      end
    end
  end
end
| 34.151261 | 101 | 0.578494 |
38dfa63e8402bd24b744c2e0179880912d339df8 | 787 | # frozen_string_literal: true
module Admin
  # Base Pundit-style policy for the admin area: every action requires the
  # :admin role on the user.
  class AdminPolicy
    attr_accessor :user, :record

    # @param user   the actor whose roles are checked
    # @param record the object (or class) being authorized
    def initialize(user, record)
      @user = user
      @record = record
    end

    # All CRUD checks share the same rule: the user must hold :admin.
    %i[index? show? create? update? destroy?].each do |action|
      define_method(action) { user.has_role?(:admin) }
    end

    # new?/edit? mirror create?/update?, per the usual policy convention.
    def new?
      create?
    end

    def edit?
      update?
    end

    # Resolves the Pundit scope for the wrapped record.
    def scope
      Pundit.policy_scope!(user, record)
    end

    # Default scope: admins see everything, so the scope is unrestricted.
    class Scope
      attr_reader :user, :scope

      def initialize(user, scope)
        @user = user
        @scope = scope
      end

      def resolve
        scope
      end
    end
  end
end
| 13.116667 | 40 | 0.551461 |
08918217919153b52819400f0757769f389c2bca | 23,000 | # frozen_string_literal: true
require 'sidekiq'
module Sidekiq
module RedisScanner
def sscan(conn, key)
cursor = '0'
result = []
loop do
cursor, values = conn.sscan(key, cursor)
result.push(*values)
break if cursor == '0'
end
result
end
end
class Stats
include RedisScanner
def initialize
fetch_stats!
end
def processed
stat :processed
end
def failed
stat :failed
end
def scheduled_size
stat :scheduled_size
end
def retry_size
stat :retry_size
end
def dead_size
stat :dead_size
end
def enqueued
stat :enqueued
end
def processes_size
stat :processes_size
end
def workers_size
stat :workers_size
end
def default_queue_latency
stat :default_queue_latency
end
def queues
Sidekiq::Stats::Queues.new.lengths
end
def fetch_stats!
pipe1_res = Sidekiq.redis do |conn|
conn.pipelined do
conn.get('stat:processed')
conn.get('stat:failed')
conn.zcard('schedule')
conn.zcard('retry')
conn.zcard('dead')
conn.scard('processes')
conn.lrange('queue:default', -1, -1)
end
end
processes = Sidekiq.redis do |conn|
sscan(conn, 'processes')
end
queues = Sidekiq.redis do |conn|
sscan(conn, 'queues')
end
pipe2_res = Sidekiq.redis do |conn|
conn.pipelined do
processes.each {|key| conn.hget(key, 'busy') }
queues.each {|queue| conn.llen("queue:#{queue}") }
end
end
s = processes.size
workers_size = pipe2_res[0...s].map(&:to_i).inject(0, &:+)
enqueued = pipe2_res[s..-1].map(&:to_i).inject(0, &:+)
default_queue_latency = if (entry = pipe1_res[6].first)
job = Sidekiq.load_json(entry) rescue {}
now = Time.now.to_f
thence = job['enqueued_at'] || now
now - thence
else
0
end
@stats = {
processed: pipe1_res[0].to_i,
failed: pipe1_res[1].to_i,
scheduled_size: pipe1_res[2],
retry_size: pipe1_res[3],
dead_size: pipe1_res[4],
processes_size: pipe1_res[5],
default_queue_latency: default_queue_latency,
workers_size: workers_size,
enqueued: enqueued
}
end
def reset(*stats)
all = %w(failed processed)
stats = stats.empty? ? all : all & stats.flatten.compact.map(&:to_s)
mset_args = []
stats.each do |stat|
mset_args << "stat:#{stat}"
mset_args << 0
end
Sidekiq.redis do |conn|
conn.mset(*mset_args)
end
end
private
def stat(s)
@stats[s]
end
class Queues
include RedisScanner
def lengths
Sidekiq.redis do |conn|
queues = sscan(conn, 'queues')
lengths = conn.pipelined do
queues.each do |queue|
conn.llen("queue:#{queue}")
end
end
i = 0
array_of_arrays = queues.inject({}) do |memo, queue|
memo[queue] = lengths[i]
i += 1
memo
end.sort_by { |_, size| size }
Hash[array_of_arrays.reverse]
end
end
end
class History
def initialize(days_previous, start_date = nil)
@days_previous = days_previous
@start_date = start_date || Time.now.utc.to_date
end
def processed
@processed ||= date_stat_hash("processed")
end
def failed
@failed ||= date_stat_hash("failed")
end
private
def date_stat_hash(stat)
i = 0
stat_hash = {}
keys = []
dates = []
while i < @days_previous
date = @start_date - i
datestr = date.strftime("%Y-%m-%d")
keys << "stat:#{stat}:#{datestr}"
dates << datestr
i += 1
end
begin
Sidekiq.redis do |conn|
conn.mget(keys).each_with_index do |value, idx|
stat_hash[dates[idx]] = value ? value.to_i : 0
end
end
rescue Redis::CommandError
# mget will trigger a CROSSSLOT error when run against a Cluster
# TODO Someone want to add Cluster support?
end
stat_hash
end
end
end
##
# Encapsulates a queue within Sidekiq.
# Allows enumeration of all jobs within the queue
# and deletion of jobs.
#
# queue = Sidekiq::Queue.new("mailer")
# queue.each do |job|
# job.klass # => 'MyWorker'
# job.args # => [1, 2, 3]
# job.delete if job.jid == 'abcdef1234567890'
# end
#
class Queue
include Enumerable
extend RedisScanner
##
# Return all known queues within Redis.
#
def self.all
Sidekiq.redis { |c| sscan(c, 'queues') }.sort.map { |q| Sidekiq::Queue.new(q) }
end
attr_reader :name
def initialize(name="default")
@name = name.to_s
@rname = "queue:#{name}"
end
def size
Sidekiq.redis { |con| con.llen(@rname) }
end
# Sidekiq Pro overrides this
def paused?
false
end
##
# Calculates this queue's latency, the difference in seconds since the oldest
# job in the queue was enqueued.
#
# @return Float
def latency
entry = Sidekiq.redis do |conn|
conn.lrange(@rname, -1, -1)
end.first
return 0 unless entry
job = Sidekiq.load_json(entry)
now = Time.now.to_f
thence = job['enqueued_at'] || now
now - thence
end
def each
initial_size = size
deleted_size = 0
page = 0
page_size = 50
while true do
range_start = page * page_size - deleted_size
range_end = range_start + page_size - 1
entries = Sidekiq.redis do |conn|
conn.lrange @rname, range_start, range_end
end
break if entries.empty?
page += 1
entries.each do |entry|
yield Job.new(entry, @name)
end
deleted_size = initial_size - size
end
end
##
# Find the job with the given JID within this queue.
#
# This is a slow, inefficient operation. Do not use under
# normal conditions. Sidekiq Pro contains a faster version.
def find_job(jid)
detect { |j| j.jid == jid }
end
def clear
Sidekiq.redis do |conn|
conn.multi do
conn.del(@rname)
conn.srem("queues", name)
end
end
end
alias_method :💣, :clear
end
##
# Encapsulates a pending job within a Sidekiq queue or
# sorted set.
#
# The job should be considered immutable but may be
# removed from the queue via Job#delete.
#
class Job
attr_reader :item
attr_reader :value
def initialize(item, queue_name=nil)
@args = nil
@value = item
@item = item.is_a?(Hash) ? item : parse(item)
@queue = queue_name || @item['queue']
end
def parse(item)
Sidekiq.load_json(item)
rescue JSON::ParserError
# If the job payload in Redis is invalid JSON, we'll load
# the item as an empty hash and store the invalid JSON as
# the job 'args' for display in the Web UI.
@invalid = true
@args = [item]
{}
end
def klass
self['class']
end
def display_class
# Unwrap known wrappers so they show up in a human-friendly manner in the Web UI
@klass ||= case klass
when /\ASidekiq::Extensions::Delayed/
safe_load(args[0], klass) do |target, method, _|
"#{target}.#{method}"
end
when "ActiveJob::QueueAdapters::SidekiqAdapter::JobWrapper"
job_class = @item['wrapped'] || args[0]
if 'ActionMailer::DeliveryJob' == job_class
# MailerClass#mailer_method
args[0]['arguments'][0..1].join('#')
else
job_class
end
else
klass
end
end
def display_args
# Unwrap known wrappers so they show up in a human-friendly manner in the Web UI
@display_args ||= case klass
when /\ASidekiq::Extensions::Delayed/
safe_load(args[0], args) do |_, _, arg|
arg
end
when "ActiveJob::QueueAdapters::SidekiqAdapter::JobWrapper"
job_args = self['wrapped'] ? args[0]["arguments"] : []
if 'ActionMailer::DeliveryJob' == (self['wrapped'] || args[0])
# remove MailerClass, mailer_method and 'deliver_now'
job_args.drop(3)
else
job_args
end
else
if self['encrypt']
# no point in showing 150+ bytes of random garbage
args[-1] = '[encrypted data]'
end
args
end
end
def args
@args || @item['args']
end
def jid
self['jid']
end
def enqueued_at
self['enqueued_at'] ? Time.at(self['enqueued_at']).utc : nil
end
def created_at
Time.at(self['created_at'] || self['enqueued_at'] || 0).utc
end
def queue
@queue
end
def latency
now = Time.now.to_f
now - (@item['enqueued_at'] || @item['created_at'] || now)
end
##
# Remove this job from the queue.
def delete
count = Sidekiq.redis do |conn|
conn.lrem("queue:#{@queue}", 1, @value)
end
count != 0
end
def [](name)
# nil will happen if the JSON fails to parse.
# We don't guarantee Sidekiq will work with bad job JSON but we should
# make a best effort to minimize the damage.
@item ? @item[name] : nil
end
private
def safe_load(content, default)
begin
yield(*YAML.load(content))
rescue => ex
# #1761 in dev mode, it's possible to have jobs enqueued which haven't been loaded into
# memory yet so the YAML can't be loaded.
Sidekiq.logger.warn "Unable to load YAML: #{ex.message}" unless Sidekiq.options[:environment] == 'development'
default
end
end
end
class SortedEntry < Job
attr_reader :score
attr_reader :parent
def initialize(parent, score, item)
super(item)
@score = score
@parent = parent
end
def at
Time.at(score).utc
end
def delete
if @value
@parent.delete_by_value(@parent.name, @value)
else
@parent.delete_by_jid(score, jid)
end
end
def reschedule(at)
delete
@parent.schedule(at, item)
end
def add_to_queue
remove_job do |message|
msg = Sidekiq.load_json(message)
Sidekiq::Client.push(msg)
end
end
def retry
remove_job do |message|
msg = Sidekiq.load_json(message)
msg['retry_count'] -= 1 if msg['retry_count']
Sidekiq::Client.push(msg)
end
end
##
# Place job in the dead set
def kill
remove_job do |message|
DeadSet.new.kill(message)
end
end
def error?
!!item['error_class']
end
private
def remove_job
Sidekiq.redis do |conn|
results = conn.multi do
conn.zrangebyscore(parent.name, score, score)
conn.zremrangebyscore(parent.name, score, score)
end.first
if results.size == 1
yield results.first
else
# multiple jobs with the same score
# find the one with the right JID and push it
hash = results.group_by do |message|
if message.index(jid)
msg = Sidekiq.load_json(message)
msg['jid'] == jid
else
false
end
end
msg = hash.fetch(true, []).first
yield msg if msg
# push the rest back onto the sorted set
conn.multi do
hash.fetch(false, []).each do |message|
conn.zadd(parent.name, score.to_f.to_s, message)
end
end
end
end
end
end
class SortedSet
include Enumerable
attr_reader :name
def initialize(name)
@name = name
@_size = size
end
def size
Sidekiq.redis { |c| c.zcard(name) }
end
def clear
Sidekiq.redis do |conn|
conn.del(name)
end
end
alias_method :💣, :clear
end
class JobSet < SortedSet
def schedule(timestamp, message)
Sidekiq.redis do |conn|
conn.zadd(name, timestamp.to_f.to_s, Sidekiq.dump_json(message))
end
end
def each
initial_size = @_size
offset_size = 0
page = -1
page_size = 50
while true do
range_start = page * page_size + offset_size
range_end = range_start + page_size - 1
elements = Sidekiq.redis do |conn|
conn.zrange name, range_start, range_end, with_scores: true
end
break if elements.empty?
page -= 1
elements.reverse.each do |element, score|
yield SortedEntry.new(self, score, element)
end
offset_size = initial_size - @_size
end
end
def fetch(score, jid = nil)
elements = Sidekiq.redis do |conn|
conn.zrangebyscore(name, score, score)
end
elements.inject([]) do |result, element|
entry = SortedEntry.new(self, score, element)
if jid
result << entry if entry.jid == jid
else
result << entry
end
result
end
end
##
# Find the job with the given JID within this sorted set.
#
# This is a slow, inefficient operation. Do not use under
# normal conditions. Sidekiq Pro contains a faster version.
def find_job(jid)
self.detect { |j| j.jid == jid }
end
def delete_by_value(name, value)
Sidekiq.redis do |conn|
ret = conn.zrem(name, value)
@_size -= 1 if ret
ret
end
end
def delete_by_jid(score, jid)
Sidekiq.redis do |conn|
elements = conn.zrangebyscore(name, score, score)
elements.each do |element|
message = Sidekiq.load_json(element)
if message["jid"] == jid
ret = conn.zrem(name, element)
@_size -= 1 if ret
break ret
end
false
end
end
end
alias_method :delete, :delete_by_jid
end
##
# Allows enumeration of scheduled jobs within Sidekiq.
# Based on this, you can search/filter for jobs. Here's an
# example where I'm selecting all jobs of a certain type
# and deleting them from the schedule queue.
#
# r = Sidekiq::ScheduledSet.new
# r.select do |scheduled|
# scheduled.klass == 'Sidekiq::Extensions::DelayedClass' &&
# scheduled.args[0] == 'User' &&
# scheduled.args[1] == 'setup_new_subscriber'
# end.map(&:delete)
class ScheduledSet < JobSet
def initialize
super 'schedule'
end
end
##
# Allows enumeration of retries within Sidekiq.
# Based on this, you can search/filter for jobs. Here's an
# example where I'm selecting all jobs of a certain type
# and deleting them from the retry queue.
#
# r = Sidekiq::RetrySet.new
# r.select do |retri|
# retri.klass == 'Sidekiq::Extensions::DelayedClass' &&
# retri.args[0] == 'User' &&
# retri.args[1] == 'setup_new_subscriber'
# end.map(&:delete)
class RetrySet < JobSet
def initialize
super 'retry'
end
def retry_all
while size > 0
each(&:retry)
end
end
def kill_all
while size > 0
each(&:kill)
end
end
end
##
# Allows enumeration of dead jobs within Sidekiq.
#
class DeadSet < JobSet
def initialize
super 'dead'
end
def kill(message, opts={})
now = Time.now.to_f
Sidekiq.redis do |conn|
conn.multi do
conn.zadd(name, now.to_s, message)
conn.zremrangebyscore(name, '-inf', now - self.class.timeout)
conn.zremrangebyrank(name, 0, - self.class.max_jobs)
end
end
if opts[:notify_failure] != false
job = Sidekiq.load_json(message)
r = RuntimeError.new("Job killed by API")
r.set_backtrace(caller)
Sidekiq.death_handlers.each do |handle|
handle.call(job, r)
end
end
true
end
def retry_all
while size > 0
each(&:retry)
end
end
def self.max_jobs
Sidekiq.options[:dead_max_jobs]
end
def self.timeout
Sidekiq.options[:dead_timeout_in_seconds]
end
end
##
# Enumerates the set of Sidekiq processes which are actively working
# right now. Each process send a heartbeat to Redis every 5 seconds
# so this set should be relatively accurate, barring network partitions.
#
# Yields a Sidekiq::Process.
#
class ProcessSet
include Enumerable
include RedisScanner
def initialize(clean_plz=true)
cleanup if clean_plz
end
# Cleans up dead processes recorded in Redis.
# Returns the number of processes cleaned.
def cleanup
count = 0
Sidekiq.redis do |conn|
procs = sscan(conn, 'processes').sort
heartbeats = conn.pipelined do
procs.each do |key|
conn.hget(key, 'info')
end
end
# the hash named key has an expiry of 60 seconds.
# if it's not found, that means the process has not reported
# in to Redis and probably died.
to_prune = []
heartbeats.each_with_index do |beat, i|
to_prune << procs[i] if beat.nil?
end
count = conn.srem('processes', to_prune) unless to_prune.empty?
end
count
end
def each
procs = Sidekiq.redis { |conn| sscan(conn, 'processes') }.sort
Sidekiq.redis do |conn|
# We're making a tradeoff here between consuming more memory instead of
# making more roundtrips to Redis, but if you have hundreds or thousands of workers,
# you'll be happier this way
result = conn.pipelined do
procs.each do |key|
conn.hmget(key, 'info', 'busy', 'beat', 'quiet')
end
end
result.each do |info, busy, at_s, quiet|
# If a process is stopped between when we query Redis for `procs` and
# when we query for `result`, we will have an item in `result` that is
# composed of `nil` values.
next if info.nil?
hash = Sidekiq.load_json(info)
yield Process.new(hash.merge('busy' => busy.to_i, 'beat' => at_s.to_f, 'quiet' => quiet))
end
end
nil
end
# This method is not guaranteed accurate since it does not prune the set
# based on current heartbeat. #each does that and ensures the set only
# contains Sidekiq processes which have sent a heartbeat within the last
# 60 seconds.
def size
Sidekiq.redis { |conn| conn.scard('processes') }
end
# Returns the identity of the current cluster leader or "" if no leader.
# This is a Sidekiq Enterprise feature, will always return "" in Sidekiq
# or Sidekiq Pro.
def leader
@leader ||= begin
x = Sidekiq.redis {|c| c.get("dear-leader") }
# need a non-falsy value so we can memoize
x = "" unless x
x
end
end
end
#
# Sidekiq::Process represents an active Sidekiq process talking with Redis.
# Each process has a set of attributes which look like this:
#
# {
# 'hostname' => 'app-1.example.com',
# 'started_at' => <process start time>,
# 'pid' => 12345,
# 'tag' => 'myapp'
# 'concurrency' => 25,
# 'queues' => ['default', 'low'],
# 'busy' => 10,
# 'beat' => <last heartbeat>,
# 'identity' => <unique string identifying the process>,
# }
class Process
def initialize(hash)
@attribs = hash
end
def tag
self['tag']
end
def labels
Array(self['labels'])
end
def [](key)
@attribs[key]
end
def identity
self['identity']
end
def quiet!
signal('TSTP')
end
def stop!
signal('TERM')
end
def dump_threads
signal('TTIN')
end
def stopping?
self['quiet'] == 'true'
end
private
def signal(sig)
key = "#{identity}-signals"
Sidekiq.redis do |c|
c.multi do
c.lpush(key, sig)
c.expire(key, 60)
end
end
end
end
##
# A worker is a thread that is currently processing a job.
# Programmatic access to the current active worker set.
#
# WARNING WARNING WARNING
#
# This is live data that can change every millisecond.
# If you call #size => 5 and then expect #each to be
# called 5 times, you're going to have a bad time.
#
# workers = Sidekiq::Workers.new
# workers.size => 2
# workers.each do |process_id, thread_id, work|
# # process_id is a unique identifier per Sidekiq process
# # thread_id is a unique identifier per thread
# # work is a Hash which looks like:
# # { 'queue' => name, 'run_at' => timestamp, 'payload' => msg }
# # run_at is an epoch Integer.
# end
#
class Workers
include Enumerable
include RedisScanner
def each
Sidekiq.redis do |conn|
procs = sscan(conn, 'processes')
procs.sort.each do |key|
valid, workers = conn.pipelined do
conn.exists(key)
conn.hgetall("#{key}:workers")
end
next unless valid
workers.each_pair do |tid, json|
yield key, tid, Sidekiq.load_json(json)
end
end
end
end
# Note that #size is only as accurate as Sidekiq's heartbeat,
# which happens every 5 seconds. It is NOT real-time.
#
# Not very efficient if you have lots of Sidekiq
# processes but the alternative is a global counter
# which can easily get out of sync with crashy processes.
def size
Sidekiq.redis do |conn|
procs = sscan(conn, 'processes')
if procs.empty?
0
else
conn.pipelined do
procs.each do |key|
conn.hget(key, 'busy')
end
end.map(&:to_i).inject(:+)
end
end
end
end
end
| 24.442083 | 118 | 0.556304 |
1a1302bdeb519ac871a4a7f519b13bcbfffb13d9 | 220 | $:.unshift(File.expand_path("../", __FILE__))
require "configset/version"
require "rainbow/ext/string"
require "configset/autoloader"
Configset::Autoloader.setup
module Configset
class Error < StandardError; end
end
| 20 | 45 | 0.772727 |
0875afbc663517a28343289aa62349e5b6d97d7f | 3,285 | # coding: utf-8
require "rails_helper"
class DummyController < ActionController::Base
include ContextualizedLogs::ContextualizedController
def show
Model.last
render json: {}
end
end
class ContextualizedModelDummyController < ActionController::Base
include ContextualizedLogs::ContextualizedController
contextualize_model true
def show
Model.last
render json: {}
end
end
RSpec.describe DummyController, type: :controller do
let(:params) { { a: 'a' } }
let!(:model) { FactoryBot.create(:model, value: 'value') }
let(:current_context) { ContextualizedLogs::CurrentContext }
before do
Rails.application.routes.draw {
get 'dummy' => 'dummy#show'
}
end
it 'should set request details' do
expect_any_instance_of(DummyController).to receive(:contextualize_request)
get :show, params: params
end
it 'should NOT set enable model context values' do
get :show, params: params
expect(current_context.contextualize_model_enabled).to eq(false)
expect(current_context.context_values).to eq(nil)
end
it 'should set resource_name' do
get :show, params: params
expect(current_context.resource_name).to eq('dummycontroller_show')
end
it 'should set request details' do
%w[user-agent referer origin].each do |header|
@request.headers[header] = header
end
allow_any_instance_of(ActionDispatch::Request).to receive(:remote_addr).and_return('192.168.0.0')
allow_any_instance_of(ActionDispatch::Request).to receive(:remote_ip).and_return('192.168.0.1')
allow_any_instance_of(ActionDispatch::Request).to receive(:ip).and_return('192.168.0.2')
allow_any_instance_of(ActionDispatch::Request).to receive(:x_forwarded_for).and_return(['192.168.0.3', '192.168.0.4'])
allow_any_instance_of(ActionDispatch::Request).to receive(:xhr?).and_return(true)
allow_any_instance_of(ActionDispatch::Request).to receive(:uuid).and_return('request_uuid')
get :show, params: params
expect(current_context.request_uuid).to eq('request_uuid')
expect(current_context.request_origin).to eq('origin')
expect(current_context.request_referer).to eq('referer')
expect(current_context.request_remote_addr).to eq('192.168.0.0')
expect(current_context.request_remote_ip).to eq('192.168.0.1')
expect(current_context.request_ip).to eq('192.168.0.2')
expect(current_context.request_x_forwarded_for).to eq(['192.168.0.3', '192.168.0.4'])
expect(current_context.request_xhr).to eq('true')
end
end
RSpec.describe ContextualizedModelDummyController, type: :controller do
let(:params) { { a: 'a' } }
let!(:model) { FactoryBot.create(:model, value: 'value') }
let(:current_context) { ContextualizedLogs::CurrentContext }
before do
routes.draw {
get 'dummy' => 'contextualized_model_dummy#show'
}
end
it 'should set request details' do
expect_any_instance_of(ContextualizedModelDummyController).to receive(:contextualize_request)
get :show, params: params
end
it 'should set enable model context values' do
get :show, params: params
expect(current_context.contextualize_model_enabled).to eq(true)
expect(current_context.context_values).to eq(model_values: ['value'], model_ids: [model.id])
end
end
| 34.21875 | 122 | 0.734551 |
d539724bb3c65ff850dd1dd9424ce0559ad73432 | 594 | # == Schema Information
#
# Table name: publications
#
# id :bigint(8) not null, primary key
# stock_number :integer
# isbn :string
# year :integer
# price :decimal(, )
# publisher_id :bigint(8)
# novel_id :bigint(8)
# created_at :datetime not null
# updated_at :datetime not null
#
# Indexes
#
# index_publications_on_novel_id (novel_id)
# index_publications_on_publisher_id (publisher_id)
#
class Publication < ApplicationRecord
belongs_to :novel
belongs_to :publisher
has_one_attached :cover_image
end
| 22 | 55 | 0.653199 |
4a93697a1b5281f2f8538147636a49a235726bb5 | 20,506 | #
# Author:: Adam Jacob (<[email protected]>)
# Author:: Christopher Brown (<[email protected]>)
# Copyright:: Copyright (c) 2009 Opscode, Inc.
# License:: Apache License, Version 2.0
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
require 'forwardable'
require 'chef/version'
require 'mixlib/cli'
require 'chef/workstation_config_loader'
require 'chef/mixin/convert_to_class_name'
require 'chef/mixin/path_sanity'
require 'chef/knife/core/subcommand_loader'
require 'chef/knife/core/ui'
require 'chef/local_mode'
require 'chef/rest'
require 'chef/http/authenticator'
require 'pp'
class Chef
class Knife
Chef::REST::RESTRequest.user_agent = "Chef Knife#{Chef::REST::RESTRequest::UA_COMMON}"
include Mixlib::CLI
include Chef::Mixin::PathSanity
extend Chef::Mixin::ConvertToClassName
extend Forwardable
# Backwards Compat:
# Ideally, we should not vomit all of these methods into this base class;
# instead, they should be accessed by hitting the ui object directly.
def_delegator :@ui, :stdout
def_delegator :@ui, :stderr
def_delegator :@ui, :stdin
def_delegator :@ui, :msg
def_delegator :@ui, :ask_question
def_delegator :@ui, :pretty_print
def_delegator :@ui, :output
def_delegator :@ui, :format_list_for_display
def_delegator :@ui, :format_for_display
def_delegator :@ui, :format_cookbook_list_for_display
def_delegator :@ui, :edit_data
def_delegator :@ui, :edit_hash
def_delegator :@ui, :edit_object
def_delegator :@ui, :confirm
attr_accessor :name_args
attr_accessor :ui
# Configure mixlib-cli to always separate defaults from user-supplied CLI options
def self.use_separate_defaults?
true
end
def self.ui
@ui ||= Chef::Knife::UI.new(STDOUT, STDERR, STDIN, {})
end
def self.msg(msg="")
ui.msg(msg)
end
def self.reset_config_loader!
@@chef_config_dir = nil
@config_loader = nil
end
def self.reset_subcommands!
@@subcommands = {}
@subcommands_by_category = nil
end
def self.inherited(subclass)
unless subclass.unnamed?
subcommands[subclass.snake_case_name] = subclass
end
end
# Explicitly set the category for the current command to +new_category+
# The category is normally determined from the first word of the command
# name, but some commands make more sense using two or more words
# ===Arguments
# new_category::: A String to set the category to (see examples)
# ===Examples:
# Data bag commands would be in the 'data' category by default. To put them
# in the 'data bag' category:
# category('data bag')
def self.category(new_category)
@category = new_category
end
def self.subcommand_category
@category || snake_case_name.split('_').first unless unnamed?
end
def self.snake_case_name
convert_to_snake_case(name.split('::').last) unless unnamed?
end
def self.common_name
snake_case_name.split('_').join(' ')
end
# Does this class have a name? (Classes created via Class.new don't)
def self.unnamed?
name.nil? || name.empty?
end
def self.subcommand_loader
@subcommand_loader ||= Knife::SubcommandLoader.new(chef_config_dir)
end
def self.load_commands
@commands_loaded ||= subcommand_loader.load_commands
end
def self.subcommands
@@subcommands ||= {}
end
def self.subcommands_by_category
unless @subcommands_by_category
@subcommands_by_category = Hash.new { |hash, key| hash[key] = [] }
subcommands.each do |snake_cased, klass|
@subcommands_by_category[klass.subcommand_category] << snake_cased
end
end
@subcommands_by_category
end
# Print the list of subcommands knife knows about. If +preferred_category+
# is given, only subcommands in that category are shown
def self.list_commands(preferred_category=nil)
load_commands
category_desc = preferred_category ? preferred_category + " " : ''
msg "Available #{category_desc}subcommands: (for details, knife SUB-COMMAND --help)\n\n"
if preferred_category && subcommands_by_category.key?(preferred_category)
commands_to_show = {preferred_category => subcommands_by_category[preferred_category]}
else
commands_to_show = subcommands_by_category
end
commands_to_show.sort.each do |category, commands|
next if category =~ /deprecated/i
msg "** #{category.upcase} COMMANDS **"
commands.sort.each do |command|
msg subcommands[command].banner if subcommands[command]
end
msg
end
end
# Shared with subclasses
@@chef_config_dir = nil
def self.config_loader
@config_loader ||= WorkstationConfigLoader.new(nil, Chef::Log)
end
def self.load_config(explicit_config_file)
config_loader.explicit_config_file = explicit_config_file
config_loader.load
ui.warn("No knife configuration file found") if config_loader.no_config_found?
config_loader
rescue Exceptions::ConfigurationError => e
ui.error(ui.color("CONFIGURATION ERROR:", :red) + e.message)
exit 1
end
def self.chef_config_dir
@@chef_config_dir ||= config_loader.chef_config_dir
end
# Run knife for the given +args+ (ARGV), adding +options+ to the list of
# CLI options that the subcommand knows how to handle.
# ===Arguments
# args::: usually ARGV
# options::: A Mixlib::CLI option parser hash. These +options+ are how
# subcommands know about global knife CLI options
def self.run(args, options={})
# Fallback debug logging. Normally the logger isn't configured until we
# read the config, but this means any logging that happens before the
# config file is read may be lost. If the KNIFE_DEBUG variable is set, we
# setup the logger for debug logging to stderr immediately to catch info
# from early in the setup process.
if ENV['KNIFE_DEBUG']
Chef::Log.init($stderr)
Chef::Log.level(:debug)
end
load_commands
subcommand_class = subcommand_class_from(args)
subcommand_class.options = options.merge!(subcommand_class.options)
subcommand_class.load_deps
instance = subcommand_class.new(args)
instance.configure_chef
instance.run_with_pretty_exceptions
end
def self.guess_category(args)
category_words = args.select {|arg| arg =~ /^(([[:alnum:]])[[:alnum:]\_\-]+)$/ }
category_words.map! {|w| w.split('-')}.flatten!
matching_category = nil
while (!matching_category) && (!category_words.empty?)
candidate_category = category_words.join(' ')
matching_category = candidate_category if subcommands_by_category.key?(candidate_category)
matching_category || category_words.pop
end
matching_category
end
def self.subcommand_class_from(args)
command_words = args.select {|arg| arg =~ /^(([[:alnum:]])[[:alnum:]\_\-]+)$/ }
subcommand_class = nil
while ( !subcommand_class ) && ( !command_words.empty? )
snake_case_class_name = command_words.join("_")
unless subcommand_class = subcommands[snake_case_class_name]
command_words.pop
end
end
# see if we got the command as e.g., knife node-list
subcommand_class ||= subcommands[args.first.gsub('-', '_')]
subcommand_class || subcommand_not_found!(args)
end
def self.dependency_loaders
@dependency_loaders ||= []
end
def self.deps(&block)
dependency_loaders << block
end
def self.load_deps
dependency_loaders.each do |dep_loader|
dep_loader.call
end
end
private
OFFICIAL_PLUGINS = %w[ec2 rackspace windows openstack terremark bluebox]
# :nodoc:
# Error out and print usage. probably because the arguments given by the
# user could not be resolved to a subcommand.
def self.subcommand_not_found!(args)
ui.fatal("Cannot find sub command for: '#{args.join(' ')}'")
if category_commands = guess_category(args)
list_commands(category_commands)
elsif missing_plugin = ( OFFICIAL_PLUGINS.find {|plugin| plugin == args[0]} )
ui.info("The #{missing_plugin} commands were moved to plugins in Chef 0.10")
ui.info("You can install the plugin with `(sudo) gem install knife-#{missing_plugin}`")
ui.info("Use `chef gem install knife-#{missing_plugin}` instead if using ChefDK")
else
list_commands
end
exit 10
end
def self.reset_config_path!
@@chef_config_dir = nil
end
reset_config_path!
public
# Create a new instance of the current class configured for the given
# arguments and options
def initialize(argv=[])
super() # having to call super in initialize is the most annoying anti-pattern :(
@ui = Chef::Knife::UI.new(STDOUT, STDERR, STDIN, config)
command_name_words = self.class.snake_case_name.split('_')
# Mixlib::CLI ignores the embedded name_args
@name_args = parse_options(argv)
@name_args.delete(command_name_words.join('-'))
@name_args.reject! { |name_arg| command_name_words.delete(name_arg) }
# knife node run_list add requires that we have extra logic to handle
# the case that command name words could be joined by an underscore :/
command_name_words = command_name_words.join('_')
@name_args.reject! { |name_arg| command_name_words == name_arg }
if config[:help]
msg opt_parser
exit 1
end
# copy Mixlib::CLI over so that it can be configured in knife.rb
# config file
Chef::Config[:verbosity] = config[:verbosity]
end
def parse_options(args)
super
rescue OptionParser::InvalidOption => e
puts "Error: " + e.to_s
show_usage
exit(1)
end
# Returns a subset of the Chef::Config[:knife] Hash that is relevant to the
# currently executing knife command. This is used by #configure_chef to
# apply settings from knife.rb to the +config+ hash.
def config_file_settings
config_file_settings = {}
self.class.options.keys.each do |key|
config_file_settings[key] = Chef::Config[:knife][key] if Chef::Config[:knife].has_key?(key)
end
config_file_settings
end
# Apply Config in this order:
# defaults from mixlib-cli
# settings from config file, via Chef::Config[:knife]
# config from command line
def merge_configs
# Apply config file settings on top of mixlib-cli defaults
combined_config = default_config.merge(config_file_settings)
# Apply user-supplied options on top of the above combination
combined_config = combined_config.merge(config)
# replace the config hash from mixlib-cli with our own.
# Need to use the mutate-in-place #replace method instead of assigning to
# the instance variable because other code may have a reference to the
# original config hash object.
config.replace(combined_config)
end
# Catch-all method that does any massaging needed for various config
# components, such as expanding file paths and converting verbosity level
# into log level.
def apply_computed_config
Chef::Config[:color] = config[:color]
case Chef::Config[:verbosity]
when 0, nil
Chef::Config[:log_level] = :warn
when 1
Chef::Config[:log_level] = :info
else
Chef::Config[:log_level] = :debug
end
Chef::Config[:log_level] = :debug if ENV['KNIFE_DEBUG']
Chef::Config[:node_name] = config[:node_name] if config[:node_name]
Chef::Config[:client_key] = config[:client_key] if config[:client_key]
Chef::Config[:chef_server_url] = config[:chef_server_url] if config[:chef_server_url]
Chef::Config[:environment] = config[:environment] if config[:environment]
Chef::Config.local_mode = config[:local_mode] if config.has_key?(:local_mode)
Chef::Config.listen = config[:listen] if config.has_key?(:listen)
if Chef::Config.local_mode && !Chef::Config.has_key?(:cookbook_path) && !Chef::Config.has_key?(:chef_repo_path)
Chef::Config.chef_repo_path = Chef::Config.find_chef_repo_path(Dir.pwd)
end
Chef::Config.chef_zero.host = config[:chef_zero_host] if config[:chef_zero_host]
Chef::Config.chef_zero.port = config[:chef_zero_port] if config[:chef_zero_port]
# Expand a relative path from the config directory. Config from command
# line should already be expanded, and absolute paths will be unchanged.
if Chef::Config[:client_key] && config[:config_file]
Chef::Config[:client_key] = File.expand_path(Chef::Config[:client_key], File.dirname(config[:config_file]))
end
Mixlib::Log::Formatter.show_time = false
Chef::Log.init(Chef::Config[:log_location])
Chef::Log.level(Chef::Config[:log_level] || :error)
if Chef::Config[:node_name] && Chef::Config[:node_name].bytesize > 90
# node names > 90 bytes only work with authentication protocol >= 1.1
# see discussion in config.rb.
Chef::Config[:authentication_protocol_version] = "1.1"
end
end
def configure_chef
# knife needs to send logger output to STDERR by default
Chef::Config[:log_location] = STDERR
config_loader = self.class.load_config(config[:config_file])
config[:config_file] = config_loader.config_location
merge_configs
apply_computed_config
# This has to be after apply_computed_config so that Mixlib::Log is configured
Chef::Log.info("Using configuration from #{config[:config_file]}") if config[:config_file]
end
def show_usage
stdout.puts("USAGE: " + self.opt_parser.to_s)
end
def run_with_pretty_exceptions(raise_exception = false)
unless self.respond_to?(:run)
ui.error "You need to add a #run method to your knife command before you can use it"
end
enforce_path_sanity
Chef::LocalMode.with_server_connectivity do
run
end
rescue Exception => e
raise if raise_exception || Chef::Config[:verbosity] == 2
humanize_exception(e)
exit 100
end
def humanize_exception(e)
case e
when SystemExit
raise # make sure exit passes through.
when Net::HTTPServerException, Net::HTTPFatalError
humanize_http_exception(e)
when OpenSSL::SSL::SSLError
ui.error "Could not establish a secure connection to the server."
ui.info "Use `knife ssl check` to troubleshoot your SSL configuration."
ui.info "If your Chef Server uses a self-signed certificate, you can use"
ui.info "`knife ssl fetch` to make knife trust the server's certificates."
ui.info ""
ui.info "Original Exception: #{e.class.name}: #{e.message}"
when Errno::ECONNREFUSED, Timeout::Error, Errno::ETIMEDOUT, SocketError
ui.error "Network Error: #{e.message}"
ui.info "Check your knife configuration and network settings"
when NameError, NoMethodError
ui.error "knife encountered an unexpected error"
ui.info "This may be a bug in the '#{self.class.common_name}' knife command or plugin"
ui.info "Please collect the output of this command with the `-VV` option before filing a bug report."
ui.info "Exception: #{e.class.name}: #{e.message}"
when Chef::Exceptions::PrivateKeyMissing
ui.error "Your private key could not be loaded from #{api_key}"
ui.info "Check your configuration file and ensure that your private key is readable"
when Chef::Exceptions::InvalidRedirect
ui.error "Invalid Redirect: #{e.message}"
ui.info "Change your server location in knife.rb to the server's FQDN to avoid unwanted redirections."
else
ui.error "#{e.class.name}: #{e.message}"
end
end
def humanize_http_exception(e)
response = e.response
case response
when Net::HTTPUnauthorized
ui.error "Failed to authenticate to #{server_url} as #{username} with key #{api_key}"
ui.info "Response: #{format_rest_error(response)}"
when Net::HTTPForbidden
ui.error "You authenticated successfully to #{server_url} as #{username} but you are not authorized for this action"
ui.info "Response: #{format_rest_error(response)}"
when Net::HTTPBadRequest
ui.error "The data in your request was invalid"
ui.info "Response: #{format_rest_error(response)}"
when Net::HTTPNotFound
ui.error "The object you are looking for could not be found"
ui.info "Response: #{format_rest_error(response)}"
when Net::HTTPInternalServerError
ui.error "internal server error"
ui.info "Response: #{format_rest_error(response)}"
when Net::HTTPBadGateway
ui.error "bad gateway"
ui.info "Response: #{format_rest_error(response)}"
when Net::HTTPServiceUnavailable
ui.error "Service temporarily unavailable"
ui.info "Response: #{format_rest_error(response)}"
when Net::HTTPNotAcceptable
version_header = Chef::JSONCompat.from_json(response["x-ops-server-api-version"])
client_api_version = version_header["request_version"]
min_server_version = version_header["min_version"]
max_server_version = version_header["max_version"]
ui.error "The version of Chef that Knife is using is not supported by the Chef server you sent this request to"
ui.info "The request that Knife sent was using API version #{client_api_version}"
ui.info "The Chef server you sent the request to supports a min API verson of #{min_server_version} and a max API version of #{max_server_version}"
ui.info "Please either update your Chef client or server to be a compatible set"
else
ui.error response.message
ui.info "Response: #{format_rest_error(response)}"
end
end
def username
Chef::Config[:node_name]
end
def api_key
Chef::Config[:client_key]
end
# Parses JSON from the error response sent by Chef Server and returns the
# error message
#--
# TODO: this code belongs in Chef::REST
def format_rest_error(response)
Array(Chef::JSONCompat.from_json(response.body)["error"]).join('; ')
rescue Exception
response.body
end
def create_object(object, pretty_name=nil, &block)
output = edit_data(object)
if Kernel.block_given?
output = block.call(output)
else
output.save
end
pretty_name ||= output
self.msg("Created #{pretty_name}")
output(output) if config[:print_after]
end
def delete_object(klass, name, delete_name=nil, &block)
confirm("Do you really want to delete #{name}")
if Kernel.block_given?
object = block.call
else
object = klass.load(name)
object.destroy
end
output(format_for_display(object)) if config[:print_after]
obj_name = delete_name ? "#{delete_name}[#{name}]" : object
self.msg("Deleted #{obj_name}")
end
# helper method for testing if a field exists
# and returning the usage and proper error if not
def test_mandatory_field(field, fieldname)
if field.nil?
show_usage
ui.fatal("You must specify a #{fieldname}")
exit 1
end
end
def rest
@rest ||= begin
require 'chef/rest'
Chef::REST.new(Chef::Config[:chef_server_url])
end
end
def noauth_rest
@rest ||= begin
require 'chef/rest'
Chef::REST.new(Chef::Config[:chef_server_url], false, false)
end
end
def server_url
Chef::Config[:chef_server_url]
end
end
end
| 35.113014 | 155 | 0.675022 |
ed4c63e62ea000772ed94934c56e041382c56c39 | 440 | class ManualPublishTaskAssociationMarshaller
def initialize(dependencies = {})
@decorator = dependencies.fetch(:decorator)
@collection = dependencies.fetch(:collection)
end
def load(manual, _record)
tasks = collection.for_manual(manual)
decorator.call(manual, publish_tasks: tasks)
end
def dump(_manual, _record)
# PublishTasks are read only
nil
end
private
attr_reader :collection, :decorator
end
| 20.952381 | 49 | 0.736364 |
7a9c5dab5575db1f04d05de1628c199208d97751 | 702 | cask 'controlplane' do
if MacOS.release <= :snow_leopard
version '1.2.3'
sha256 '37f93d3a3a17a6e2f24447f0bc74c7e89ec1581ca52e5970960544858c86f909'
else
version '1.6.1'
sha256 '945ae102b81b838df65edf6f83292d33399218113e67bdfdaaa088c0f219ea47'
appcast 'http://www.controlplaneapp.com/appcast.xml',
checkpoint: 'c4b1f509972a926c73775eed1feab651348c557fe7cb383a3f50bd7cd390aa9f'
end
url "http://www.controlplaneapp.com/download/#{version}"
name 'ControlPlane'
homepage 'http://www.controlplaneapp.com/'
license :gpl
depends_on macos: '>= :snow_leopard'
app 'ControlPlane.app'
zap delete: '~/Library/Preferences/com.dustinrue.ControlPlane.plist'
end
| 29.25 | 90 | 0.759259 |
f7149b68b5cdbf32f36899f39d14e6acc2dfdb6d | 7,099 | require 'spec_helper'
describe 'fail2ban', type: :class do
on_supported_os.each do |os, facts|
context "on #{os}" do
let(:facts) do
facts
end
it { is_expected.to compile.with_all_deps }
it { is_expected.to contain_anchor('fail2ban::begin') }
it { is_expected.to contain_class('fail2ban::params') }
it { is_expected.to contain_class('fail2ban::install') }
it { is_expected.to contain_class('fail2ban::config') }
it { is_expected.to contain_class('fail2ban::service') }
it { is_expected.to contain_anchor('fail2ban::end') }
describe 'fail2ban::install' do
context 'defaults' do
it do
is_expected.to contain_package('fail2ban').with(
'ensure' => 'present'
)
end
end
context 'when package latest' do
let(:params) do
{
package_ensure: 'latest'
}
end
it do
is_expected.to contain_package('fail2ban').with(
'ensure' => 'latest'
)
end
end
context 'when package absent' do
let(:params) do
{
package_ensure: 'absent',
service_ensure: 'stopped',
service_enable: false
}
end
it do
is_expected.to contain_package('fail2ban').with(
'ensure' => 'absent'
)
end
it do
is_expected.to contain_file('fail2ban.conf').with(
'ensure' => 'present',
'notify' => 'Service[fail2ban]',
'require' => 'Package[fail2ban]'
)
end
it do
is_expected.to contain_service('fail2ban').with(
'ensure' => 'stopped',
'enable' => false
)
end
end
context 'when package purged' do
let(:params) do
{
package_ensure: 'purged',
service_ensure: 'stopped',
service_enable: false
}
end
it do
is_expected.to contain_package('fail2ban').with(
'ensure' => 'purged'
)
end
it do
is_expected.to contain_file('fail2ban.conf').with(
'ensure' => 'absent',
'notify' => 'Service[fail2ban]',
'require' => 'Package[fail2ban]'
)
end
it do
is_expected.to contain_service('fail2ban').with(
'ensure' => 'stopped',
'enable' => false
)
end
end
end
describe 'fail2ban::config' do
context 'defaults' do
it do
is_expected.to contain_file('fail2ban.conf').with(
'ensure' => 'present',
'notify' => 'Service[fail2ban]',
'require' => 'Package[fail2ban]'
)
end
end
context 'when source dir' do
let(:params) do
{
config_dir_source: 'puppet:///modules/fail2ban/wheezy/etc/fail2ban'
}
end
it do
is_expected.to contain_file('fail2ban.dir').with(
'ensure' => 'directory',
'force' => false,
'purge' => false,
'recurse' => true,
'source' => 'puppet:///modules/fail2ban/wheezy/etc/fail2ban',
'notify' => 'Service[fail2ban]',
'require' => 'Package[fail2ban]'
)
end
end
context 'when source dir purged' do
let(:params) do
{
config_dir_purge: true,
config_dir_source: 'puppet:///modules/fail2ban/wheezy/etc/fail2ban'
}
end
it do
is_expected.to contain_file('fail2ban.dir').with(
'ensure' => 'directory',
'force' => true,
'purge' => true,
'recurse' => true,
'source' => 'puppet:///modules/fail2ban/wheezy/etc/fail2ban',
'notify' => 'Service[fail2ban]',
'require' => 'Package[fail2ban]'
)
end
end
context 'when source file' do
let(:params) do
{
config_file_source: 'puppet:///modules/fail2ban/wheezy/etc/fail2ban/jail.conf'
}
end
it do
is_expected.to contain_file('fail2ban.conf').with(
'ensure' => 'present',
'source' => 'puppet:///modules/fail2ban/wheezy/etc/fail2ban/jail.conf',
'notify' => 'Service[fail2ban]',
'require' => 'Package[fail2ban]'
)
end
end
context 'when content string' do
let(:params) do
{
config_file_string: '# THIS FILE IS MANAGED BY PUPPET'
}
end
it do
is_expected.to contain_file('fail2ban.conf').with(
'ensure' => 'present',
'content' => %r{THIS FILE IS MANAGED BY PUPPET},
'notify' => 'Service[fail2ban]',
'require' => 'Package[fail2ban]'
)
end
end
context 'when content template' do
let(:params) do
{
config_file_template: 'fail2ban/wheezy/etc/fail2ban/jail.conf.erb'
}
end
it do
is_expected.to contain_file('fail2ban.conf').with(
'ensure' => 'present',
'content' => %r{THIS FILE IS MANAGED BY PUPPET},
'notify' => 'Service[fail2ban]',
'require' => 'Package[fail2ban]'
)
end
end
context 'when content template (custom)' do
let(:params) do
{
config_file_template: 'fail2ban/wheezy/etc/fail2ban/jail.conf.erb',
config_file_options_hash: {
'key' => 'value'
}
}
end
it do
is_expected.to contain_file('fail2ban.conf').with(
'ensure' => 'present',
'content' => %r{THIS FILE IS MANAGED BY PUPPET},
'notify' => 'Service[fail2ban]',
'require' => 'Package[fail2ban]'
)
end
end
end
describe 'fail2ban::service' do
context 'defaults' do
it do
is_expected.to contain_service('fail2ban').with(
'ensure' => 'running',
'enable' => true
)
end
end
context 'when service stopped' do
let(:params) do
{
service_ensure: 'stopped'
}
end
it do
is_expected.to contain_service('fail2ban').with(
'ensure' => 'stopped',
'enable' => true
)
end
end
end
end
end
end
| 28.170635 | 92 | 0.461473 |
2682868bda0579bf727ea3a0e670af421932c198 | 3,590 | # encoding: utf-8
require 'spec_helper'
describe Github::Client::Repos, '#list' do
let(:user) { 'peter-murach' }
let(:repo) { 'github' }
let(:request_path) { "/user/repos?access_token=#{OAUTH_TOKEN}" }
let(:body) { fixture('repos/repos.json') }
let(:status) { 200 }
after { reset_authentication_for subject }
it { should respond_to(:find) }
context "resource found for authenticated user" do
before {
subject.oauth_token = OAUTH_TOKEN
stub_get(request_path).to_return(:body => body, :status => status,
:headers => {:content_type => "application/json; charset=utf-8"} )
stub_get(request_path + "&sort=pushed").
to_return(:body => fixture('repos/repos_sorted_by_pushed.json'), :status => 200,:headers => {:content_type => "application/json; charset=utf-8"} )
}
it "falls back to all if user is unauthenticated" do
subject.oauth_token = nil
stub_get("/user/repos").to_return(:body => '[]', :status => 200,
:headers => {:content_type => "application/json; charset=utf-8"} )
subject.list
a_get('/user/repos').should have_been_made
end
it "should get the resources" do
subject.list
a_get(request_path).should have_been_made
end
it_should_behave_like 'an array of resources' do
let(:requestable) { subject.list }
end
it "should return array of resources sorted by pushed_at time" do
repositories = subject.list(:sort => 'pushed')
repositories.first.name.should == "Hello-World-2"
end
it "should get resource information" do
repositories = subject.list
repositories.first.name.should == 'Hello-World'
end
it "should yield result to a block" do
yielded = []
result = subject.list { |obj| yielded << obj }
yielded.should == result
end
end
context 'all repositories' do
let(:request_path) { '/repositories' }
before {
stub_get(request_path).to_return(:body => body, :status => status,
:headers => {:content_type => "application/json; charset=utf-8"} )
}
it "should get the resources" do
subject.list :every
a_get(request_path).should have_been_made
end
end
context "resource found for organization" do
let(:org) { '37signals' }
let(:request_path) { "/orgs/#{org}/repos" }
before {
stub_get(request_path).to_return(:body => body, :status => status,
:headers => {:content_type => "application/json; charset=utf-8"} )
}
it "should get the resources" do
subject.list :org => org
a_get(request_path).should have_been_made
end
end
context "resource found for a user" do
let(:request_path) { "/users/#{user}/repos" }
before {
stub_get(request_path).to_return(:body => body, :status => status,
:headers => {:content_type => "application/json; charset=utf-8"} )
}
it "should filter the parameters" do
subject.list 'user' => user, :unknown => true
a_get(request_path).with(body: {}).should have_been_made
end
it "should get the resources" do
subject.list :user => user
a_get(request_path).should have_been_made
end
end
context "rosource not found for authenticated user" do
before {
subject.oauth_token = OAUTH_TOKEN
stub_get(request_path).to_return(:body => '', :status => 404,
:headers => {:content_type => "application/json; charset=utf-8"} )
}
it "fail to find resources" do
expect { subject.list }.to raise_error(Github::Error::NotFound)
end
end
end # list
| 30.168067 | 154 | 0.64039 |
4a7cd8e3f5d68faa3f547e67711e78ff4452c7c5 | 526 | module Qernel::Plugins
class ResettableSlots
include Plugin
after :calculation, :reset_slots_to_zero
def reset_slots_to_zero
@graph.nodes.each do |node|
node.slots.each do |slot|
if slot.reset_to_zero == true
slot.conversion = 0.0
slot.edges.each do |edge|
edge.share = 0.0
edge.value = 0.0
end
end # if
end # node.slots.each
end # nodes.each
end
end # ResettableSlots
end # Qernel::Plugins
| 22.869565 | 44 | 0.572243 |
bb87bd843072d41e726a4f5524e0655c03234e83 | 510 | class EsiFundViewAdapter < DocumentViewAdapter
attributes = [
:title,
:summary,
:body,
:fund_state,
:fund_type,
:location,
:funding_source,
:closing_date,
]
attributes.each do |attribute_name|
define_method(attribute_name) do
delegate_if_document_exists(attribute_name)
end
end
def self.model_name
ActiveModel::Name.new(self, nil, "EsiFund")
end
private
def finder_schema
SpecialistPublisherWiring.get(:esi_fund_finder_schema)
end
end
| 17 | 58 | 0.701961 |
9106c5023f2f00020594ac6399d950bd4a9b8c2b | 942 | # frozen_string_literal: true
module Babik
module QuerySet
# select_related functionality of QuerySet
module RelatedSelector
# Load the related objects of each model object specified by the association_paths
#
# e.g.
# - User.objects.filter(first_name: 'Julius').select_related(:group)
# - User.objects.filter(first_name: 'Cassius').select_related([:group, :zone])
# - Post.objects.select_related(:author)
#
# @param association_paths [Array<Symbol>, Symbol] Array of association paths
# of belongs_to and has_one related objects.
# A passed symbol will be considered as an array of one symbol.
# That is, select_related(:group) is equal to select_related([:group])
def select_related!(association_paths)
@_select_related = Babik::QuerySet::SelectRelated.new(@model, association_paths)
self
end
end
end
end | 33.642857 | 88 | 0.674098 |
03e5c6a5eff67e1abe5c84d6ab9471fb2f460b84 | 460 | # encoding: utf-8
# frozen_string_literal: true
require 'active_support/all'
module ViaCep
  # String-normalisation helpers used when building ViaCEP request URLs.
  class Utils
    # Replaces runs of whitespace between words with the URL escape '%20'.
    # Leading/trailing whitespace is dropped (String#split with no pattern
    # behaves exactly like split(' ')).
    def self.handle_whitespaces(string)
      string.split.join('%20')
    end

    # Strips accents/diacritics via ActiveSupport's transliteration.
    def self.handle_accents(string)
      ActiveSupport::Inflector.transliterate(string)
    end

    # Normalises a string for use in a URL: first remove accents, then
    # escape the whitespace.
    def self.parameterize(string)
      handle_whitespaces(handle_accents(string))
    end
  end
end
| 19.166667 | 54 | 0.652174 |
b98892ecc1649682f8dd4b6f0169307d9eae2735 | 825 | #
# Cookbook Name:: kernel-modules
# Author:: Jeremy MAURO <[email protected]>
#
# Copyright 2016, Criteo.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
require 'serverspec'

# Configure the serverspec backend for the current platform: native exec on
# Unix-like systems; the cmd backend (plus the windows OS family) when
# RUBY_PLATFORM looks like a Windows/Cygwin build.
if /cygwin|mswin|mingw|bccwin|wince|emx/ =~ RUBY_PLATFORM
  set :backend, :cmd
  set :os, family: 'windows'
else
  set :backend, :exec
end
| 29.464286 | 74 | 0.738182 |
79ec3de2f084e678db285e383cd2d03aa8254648 | 619 | Pod::Spec.new do |s|
s.name = "PPDBLUtils"
s.version = "1.0.1"
s.summary = "测试pod生成PPDBLUtils的framework"
s.description = "测试用的,用了测试使用pod lib create 生产的framework工程"
s.homepage = "https://github.com/liu521227/PPDBLUtils.git"
s.license = 'MIT'
s.author = { "liu521227" => "[email protected]" }
s.source = { :git => "https://github.com/liu521227/PPDBLUtils.git", :tag => s.version.to_s }
s.source_files = 'Classes', "Classes/**/*.{h,m}"
s.platform = :ios, '8.0'
s.requires_arc = true
s.frameworks = 'UIKit', 'CoreText'
end | 41.266667 | 104 | 0.575121 |
619e5de25bd668ddc8a316894d91e79d6bc7af60 | 3,825 |
require 'socket'
require 'thread'
require 'discordrb'
require_relative 'reudy_common'
module Gimite
class DiscordBot
include Gimite
SILENT_SECOND = 20.0 #沈黙が続いたと判断する秒数。
def initialize(user, logOut = STDOUT)
@user = user #Reudyオブジェクトを設定
@token = @user.settings[:disc_token] #BOTトークンを取得
@bot = Discordrb::Bot.new(token: @token) #Discordクライアントオブジェクト
@user.client = self
@queue = Queue.new
@prevTime = Time.now #onSilent用。
@chid = @user.settings[:disc_channel].to_i
@logid = @user.settings[:disc_log].to_i
@now_message = nil
@now_channel = nil
begin
@log_channel = @bot.channel(@logid)
rescue
end
@isExitting = nil
threads = []
threads << Thread.new { processLoop(@token) }
threads << Thread.fork { message_shori() }
threads.each { |thr| thr.join }
end
def status=(status)
end
#exitがないってエラーの対策
def exit
puts "終了"
end
    # Worker loop: drains @queue and forwards each chat message to the Reudy
    # engine, and fires a "silence" event when no message has arrived for
    # SILENT_SECOND seconds. Intended to run forever on its own thread.
    def message_shori
      loop do
        # Random pre-speak delay: 0.5x..1.5x of the configured wait.
        sleep(@user.settings[:wait_before_speak].to_f * (0.5 + rand)) if @user.settings[:wait_before_speak]
        # NOTE(review): when :wait_before_speak is unset this loop never
        # sleeps and will busy-spin a CPU core -- confirm this is intended.
        if !@queue.empty?
          args = @queue.pop
          # The appended boolean tells the engine whether more messages are
          # still pending in the queue.
          if @queue.empty?
            @user.onOtherSpeak(*(args + [false]))
          else
            @user.onOtherSpeak(*(args + [true]))
          end
        end
        time = Time.now
        if time - @prevTime >= SILENT_SECOND
          puts "沈黙を検知"
          @prevTime = time
          @user.onSilent
        end
      end
    end
#発言
def speak(s)
begin
puts "メッセージ: #{s}"
if @now_channel != nil
@now_channel.start_typing()
end
if s.length / (@user.mode * 1.6) <= 7
sleep(s.length / (@user.mode * 1.6))
else
sleep(7)
end
@now_channel.send(s)
rescue => e
end
end
#実行可能なコマンドを実行
def command(s)
puts "コマンド: #{s}"
begin
if s =~ /!command disc_move (.+) (.+)/
result = @bot.channel($1.to_i)
puts "#{result.name}"
unless result == {} || result == []
@now_channel = result
@chid = result.id
return true
end
end
return false
rescue => e
end
end
#ログを出力
def outputInfo(x)
if @user.settings[:teacher_mode]
puts x
@log_channel.send(x)
else
puts x
end
end
#botの重要なプロセス。
#メッセージを受信。
def processLoop(token)
puts "プロセススタート"
# サーバから受け取ったメッセージを処理
@bot.message do |event|
unless event.message.content == ''
if event.channel.id != @logid && (event.channel.id == @chid || @user.settings[:nicks].any? {|n| event.message.content.include?(n)} || event.channel.type == 1)
if event.channel.id != @chid
@user.onDiscMove(event.channel.id, event.channel.name, event.user.name)
end
@chid = event.channel.id
@now_message = event.message
@now_channel = event.channel
puts "> メッセージ受信: #{event.message.content}"
abc = event.message.content.gsub(/\R/, " ").gsub(/<@(.*?)> /, "お客さん").gsub(/ <@(.*?)>/, "お客さん").gsub(/ <@(.*?)> /, "お客さん").gsub(/<@(.*?)>/, "お客さん").gsub(/@here/, "みんな").gsub(/@everyone/, "みんな")
@prevTime = Time.now
@queue.push([event.user.name, abc])
elsif event.channel.id == @logid
puts "> コンソール受信: #{event.message.content}"
@user.onControlMsg(event.message.content)
@log_message = event.message
end
end
end
@bot.run
end
end
end
| 26.93662 | 206 | 0.511634 |
e9e2d01ed64efdf7370e50547e87eb6d3d47a012 | 232 | class CreateBookListInfos < ActiveRecord::Migration[6.0]
def change
create_table :book_list_infos, id: :uuid do |t|
t.string :book_id
t.string :user_id
t.string :list_id
t.timestamps
end
end
end
| 19.333333 | 56 | 0.663793 |
79bacb486b928cd18329cfe00a52cd997153e3f2 | 571 | cask 'freesmug-chromium' do
version '54.0.2840.71'
sha256 'f64cf586d017d737288e29556ccbf6c69d8002fcfdf96e0d48a3a981111e4f59'
# sourceforge.net/osxportableapps was verified as official when first introduced to the cask
url "https://downloads.sourceforge.net/osxportableapps/Chromium_OSX_#{version}.dmg"
appcast 'https://sourceforge.net/projects/osxportableapps/rss?path=/Chromium',
checkpoint: '53c130d3a82eb72803759b753728e20bdbce23b365a43a18da2abeecc9989604'
name 'Chromium'
homepage 'http://www.freesmug.org/chromium'
app 'Chromium.app'
end
| 40.785714 | 94 | 0.798599 |
b9a275bb5d6c52be392a3f05beac25d2c01587d3 | 566 | module Less2Sass
module Less
module Tree
class MixinDefinitionNode < Node
attr_accessor :name
attr_accessor :selectors
attr_accessor :params
attr_accessor :condition
attr_accessor :variadic
attr_accessor :arity
attr_accessor :rules
attr_accessor :_lookups
attr_accessor :required
attr_accessor :optionalParameters
attr_accessor :frames
# @see Node#creates_environment?
def creates_environment?
true
end
end
end
end
end
| 22.64 | 41 | 0.634276 |
ffe0a75ee973ec9ebf58324bbee9ce08360f1721 | 3,473 | # This code was ported from the java version available here:
# http://grepcode.com/file/repo1.maven.org/maven2/com.ibm.icu/icu4j/51.2/com/ibm/icu/text/RBNFChinesePostProcessor.java
# This code is incomplete, untested, and unused. It should remain here until
# I can figure out why it's necessary in ICU and wether to make use of it here or not.
RULE_SET_NAMES = ["traditional", "simplified", "accounting", "time"]
DIAN = 40670 # decimal point
MARKERS = [
[33836, 20740, 20806, 12295], # marker chars, last char is the 'zero'
[19975, 20159, 20806, 12295],
[33836, 20740, 20806, 38646]
# need markers for time?
]
def process(str, rule_set)
# markers depend on what rule set we are using
buf = str.unpack("U*")
name = rule_set.name
format = RULE_SET_NAMES.find_index { |rule_set_name| rule_set.name == rule_set_name }
long_form = format == 1 || format == 3
if long_form
i = buf.index("*".ord)
while i != -1
buf.delete(i...i + 1)
i = buf.index("*".ord)
end
else
# remove unwanted lings
# a '0' (ling) with * might be removed
# mark off 10,000 'chunks', markers are Z, Y, W (zhao, yii, and wan)
# already, we avoid two lings in the same chunk -- ling without * wins
# now, just need to avoid optional lings in adjacent chunks
# process right to left
# decision matrix:
# state, situation
# state none opt. req.
# ----- ---- ---- ----
# none to right none opt. req.
# opt. to right none clear, none clear right, req.
# req. to right none clear, none req.
# mark chunks with '|' for convenience
m = MARKERS[format]
0.upto(m.length - 2) do |i|
n = buf.index(m[i])
if n != -1
buf.insert(n + m[i].length, '|'.ord)
end
end
x = buf.index(DIAN)
x = buf.length if x == -1
s = 0 # 0 = none to right, 1 = opt. to right, 2 = req. to right
n = -1 # previous optional ling
ling = MARKERS[format][3]
while x >= 0
m = buf.rindex("|", x)
nn = buf.rindex(ling, x)
ns = 0
if nn > m
ns = (nn > 0 && buf[nn - 1] != '*'.ord) ? 2 : 1
end
x = m - 1
# actually much simpler, but leave this verbose for now so it's easier to follow
case s * 3 + ns
when 0 # none, none
s = ns # redundant
n = -1
when 1 # none, opt.
s = ns
n = nn # remember optional ling to right
when 2 # none, req.
s = ns
n = -1
when 3 # opt., none
s = ns
n = -1
when 4 # opt., opt.
# n + ling.length
buf.delete((nn - 1)...(nn + 1)) # delete current optional ling
s = 0
n = -1
when 5 # opt., req.
# n + ling.length
buf.delete((n - 1)...(n + 1)) # delete previous optional ling
s = ns
n = -1
when 6 # req., none
s = ns
n = -1
when 7 # req., opt.
# nn + ling.length
buf.delete((nn - 1)...(nn + 1)) # delete current optional ling
s = 0
n = -1
when 8 # req., req.
s = ns
n = -1
else
raise "Illegal state"
end
end
buf.length.downto(0) do |i|
if buf[i] == "*".ord || buf[i] == "|".ord
buf.delete(i...i + 1)
end
end
end
buf.pack("U*")
end
| 28.235772 | 119 | 0.514829 |
01219b856d3e5a64896a5ba7736926247db16c3d | 710 | module MoneyRails
module ActiveRecord
module MigrationExtensions
class OptionsExtractor
def self.extract(attribute, table_name, accessor, options = {})
default = MoneyRails::Configuration.send("#{attribute}_column").merge(options[attribute] || {})
default[:column_name] ||= [default[:prefix], accessor, default[:postfix]].join
default[:table_name] = table_name
excluded_keys = [:amount, :currency, :type, :prefix, :postfix, :present, :column_name, :table_name]
default[:options] = default.except(*excluded_keys)
default.slice(:present, :table_name, :column_name, :type, :options).values
end
end
end
end
end
| 35.5 | 109 | 0.657746 |
e866257b0d30b56e05df4b03708aea2a1ceb14a1 | 353 | class AddFieldsToSpreeLoyaltyPointsTransaction < ActiveRecord::Migration[4.2]
def change
add_column :spree_loyalty_points_transactions, :source_type, :string
rename_column :spree_loyalty_points_transactions, :order_id, :source_id
add_column :spree_loyalty_points_transactions, :updated_balance, :integer, default: 0, null: false
end
end
| 44.125 | 102 | 0.815864 |
f78ee48c1dc4e52ba8d27b1e27dec9430b8fa9fb | 259 | require "test_helper"
# Smoke tests for the two Home pages: each just asserts a successful (2xx)
# response from the generated route helper.
class HomeControllerTest < ActionDispatch::IntegrationTest
  test "should get index" do
    get home_index_url
    assert_response :success
  end
  test "should get show" do
    get home_show_url
    assert_response :success
  end
end
| 18.5 | 58 | 0.752896 |
7aad3e5ca52ff2343f1591065efd8d24c525f657 | 32,039 | # encoding: utf-8
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
module Azure::ApiManagement::Mgmt::V2018_01_01
  #
  # ApiManagement Client
  #
  class ProductPolicy
    include MsRestAzure
    #
    # Creates and initializes a new instance of the ProductPolicy class.
    # @param client service class for accessing basic functionality.
    #
    def initialize(client)
      @client = client
    end
    # @return [ApiManagementClient] reference to the ApiManagementClient
    attr_reader :client
    #
    # Get the policy configuration at the Product level.
    #
    # @param resource_group_name [String] The name of the resource group.
    # @param service_name [String] The name of the API Management service.
    # @param product_id [String] Product identifier. Must be unique in the current
    # API Management service instance.
    # @param custom_headers [Hash{String => String}] A hash of custom headers that
    # will be added to the HTTP request.
    #
    # @return [PolicyCollection] operation results.
    #
    def list_by_product(resource_group_name, service_name, product_id, custom_headers:nil)
      response = list_by_product_async(resource_group_name, service_name, product_id, custom_headers:custom_headers).value!
      response.body unless response.nil?
    end
    #
    # Get the policy configuration at the Product level.
    #
    # @param resource_group_name [String] The name of the resource group.
    # @param service_name [String] The name of the API Management service.
    # @param product_id [String] Product identifier. Must be unique in the current
    # API Management service instance.
    # @param custom_headers [Hash{String => String}] A hash of custom headers that
    # will be added to the HTTP request.
    #
    # @return [MsRestAzure::AzureOperationResponse] HTTP response information.
    #
    def list_by_product_with_http_info(resource_group_name, service_name, product_id, custom_headers:nil)
      list_by_product_async(resource_group_name, service_name, product_id, custom_headers:custom_headers).value!
    end
    #
    # Get the policy configuration at the Product level.
    #
    # @param resource_group_name [String] The name of the resource group.
    # @param service_name [String] The name of the API Management service.
    # @param product_id [String] Product identifier. Must be unique in the current
    # API Management service instance.
    # @param [Hash{String => String}] A hash of custom headers that will be added
    # to the HTTP request.
    #
    # @return [Concurrent::Promise] Promise object which holds the HTTP response.
    #
    def list_by_product_async(resource_group_name, service_name, product_id, custom_headers:nil)
      # Client-side validation of the ARM naming constraints from the Swagger spec.
      fail ArgumentError, 'resource_group_name is nil' if resource_group_name.nil?
      fail ArgumentError, 'service_name is nil' if service_name.nil?
      fail ArgumentError, "'service_name' should satisfy the constraint - 'MaxLength': '50'" if !service_name.nil? && service_name.length > 50
      fail ArgumentError, "'service_name' should satisfy the constraint - 'MinLength': '1'" if !service_name.nil? && service_name.length < 1
      # NOTE(review): the generated pattern doubles the anchors ('^^...$$');
      # harmless, since repeated ^/$ anchors are no-ops in a Ruby Regexp.
      fail ArgumentError, "'service_name' should satisfy the constraint - 'Pattern': '^[a-zA-Z](?:[a-zA-Z0-9-]*[a-zA-Z0-9])?$'" if !service_name.nil? && service_name.match(Regexp.new('^^[a-zA-Z](?:[a-zA-Z0-9-]*[a-zA-Z0-9])?$$')).nil?
      fail ArgumentError, '@client.api_version is nil' if @client.api_version.nil?
      fail ArgumentError, '@client.subscription_id is nil' if @client.subscription_id.nil?
      fail ArgumentError, 'product_id is nil' if product_id.nil?
      fail ArgumentError, "'product_id' should satisfy the constraint - 'MaxLength': '80'" if !product_id.nil? && product_id.length > 80
      fail ArgumentError, "'product_id' should satisfy the constraint - 'MinLength': '1'" if !product_id.nil? && product_id.length < 1
      fail ArgumentError, "'product_id' should satisfy the constraint - 'Pattern': '(^[\w]+$)|(^[\w][\w\-]+[\w]$)'" if !product_id.nil? && product_id.match(Regexp.new('^(^[\w]+$)|(^[\w][\w\-]+[\w]$)$')).nil?
      request_headers = {}
      request_headers['Content-Type'] = 'application/json; charset=utf-8'
      # Set Headers
      request_headers['x-ms-client-request-id'] = SecureRandom.uuid
      request_headers['accept-language'] = @client.accept_language unless @client.accept_language.nil?
      path_template = 'subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ApiManagement/service/{serviceName}/products/{productId}/policies'
      request_url = @base_url || @client.base_url
      options = {
          middlewares: [[MsRest::RetryPolicyMiddleware, times: 3, retry: 0.02], [:cookie_jar]],
          path_params: {'resourceGroupName' => resource_group_name,'serviceName' => service_name,'subscriptionId' => @client.subscription_id,'productId' => product_id},
          query_params: {'api-version' => @client.api_version},
          headers: request_headers.merge(custom_headers || {}),
          base_url: request_url
      }
      promise = @client.make_request_async(:get, path_template, options)
      promise = promise.then do |result|
        http_response = result.response
        status_code = http_response.status
        response_content = http_response.body
        unless status_code == 200
          error_model = JSON.load(response_content)
          fail MsRest::HttpOperationError.new(result.request, http_response, error_model)
        end
        result.request_id = http_response['x-ms-request-id'] unless http_response['x-ms-request-id'].nil?
        result.correlation_request_id = http_response['x-ms-correlation-request-id'] unless http_response['x-ms-correlation-request-id'].nil?
        result.client_request_id = http_response['x-ms-client-request-id'] unless http_response['x-ms-client-request-id'].nil?
        # Deserialize Response
        if status_code == 200
          begin
            parsed_response = response_content.to_s.empty? ? nil : JSON.load(response_content)
            result_mapper = Azure::ApiManagement::Mgmt::V2018_01_01::Models::PolicyCollection.mapper()
            result.body = @client.deserialize(result_mapper, parsed_response)
          # Broad rescue is intentional in generated code: any parsing failure is
          # surfaced as a DeserializationError carrying the raw result.
          rescue Exception => e
            fail MsRest::DeserializationError.new('Error occurred in deserializing the response', e.message, e.backtrace, result)
          end
        end
        result
      end
      promise.execute
    end
    #
    # Get the ETag of the policy configuration at the Product level.
    #
    # @param resource_group_name [String] The name of the resource group.
    # @param service_name [String] The name of the API Management service.
    # @param product_id [String] Product identifier. Must be unique in the current
    # API Management service instance.
    # @param custom_headers [Hash{String => String}] A hash of custom headers that
    # will be added to the HTTP request.
    #
    #
    def get_entity_tag(resource_group_name, service_name, product_id, custom_headers:nil)
      response = get_entity_tag_async(resource_group_name, service_name, product_id, custom_headers:custom_headers).value!
      nil
    end
    #
    # Get the ETag of the policy configuration at the Product level.
    #
    # @param resource_group_name [String] The name of the resource group.
    # @param service_name [String] The name of the API Management service.
    # @param product_id [String] Product identifier. Must be unique in the current
    # API Management service instance.
    # @param custom_headers [Hash{String => String}] A hash of custom headers that
    # will be added to the HTTP request.
    #
    # @return [MsRestAzure::AzureOperationResponse] HTTP response information.
    #
    def get_entity_tag_with_http_info(resource_group_name, service_name, product_id, custom_headers:nil)
      get_entity_tag_async(resource_group_name, service_name, product_id, custom_headers:custom_headers).value!
    end
    #
    # Get the ETag of the policy configuration at the Product level.
    #
    # @param resource_group_name [String] The name of the resource group.
    # @param service_name [String] The name of the API Management service.
    # @param product_id [String] Product identifier. Must be unique in the current
    # API Management service instance.
    # @param [Hash{String => String}] A hash of custom headers that will be added
    # to the HTTP request.
    #
    # @return [Concurrent::Promise] Promise object which holds the HTTP response.
    #
    def get_entity_tag_async(resource_group_name, service_name, product_id, custom_headers:nil)
      fail ArgumentError, 'resource_group_name is nil' if resource_group_name.nil?
      fail ArgumentError, 'service_name is nil' if service_name.nil?
      fail ArgumentError, "'service_name' should satisfy the constraint - 'MaxLength': '50'" if !service_name.nil? && service_name.length > 50
      fail ArgumentError, "'service_name' should satisfy the constraint - 'MinLength': '1'" if !service_name.nil? && service_name.length < 1
      fail ArgumentError, "'service_name' should satisfy the constraint - 'Pattern': '^[a-zA-Z](?:[a-zA-Z0-9-]*[a-zA-Z0-9])?$'" if !service_name.nil? && service_name.match(Regexp.new('^^[a-zA-Z](?:[a-zA-Z0-9-]*[a-zA-Z0-9])?$$')).nil?
      fail ArgumentError, '@client.api_version is nil' if @client.api_version.nil?
      fail ArgumentError, '@client.subscription_id is nil' if @client.subscription_id.nil?
      fail ArgumentError, 'product_id is nil' if product_id.nil?
      fail ArgumentError, "'product_id' should satisfy the constraint - 'MaxLength': '80'" if !product_id.nil? && product_id.length > 80
      fail ArgumentError, "'product_id' should satisfy the constraint - 'MinLength': '1'" if !product_id.nil? && product_id.length < 1
      fail ArgumentError, "'product_id' should satisfy the constraint - 'Pattern': '(^[\w]+$)|(^[\w][\w\-]+[\w]$)'" if !product_id.nil? && product_id.match(Regexp.new('^(^[\w]+$)|(^[\w][\w\-]+[\w]$)$')).nil?
      # The service exposes exactly one policy per product; its id is fixed.
      policy_id = 'policy'
      request_headers = {}
      request_headers['Content-Type'] = 'application/json; charset=utf-8'
      # Set Headers
      request_headers['x-ms-client-request-id'] = SecureRandom.uuid
      request_headers['accept-language'] = @client.accept_language unless @client.accept_language.nil?
      path_template = 'subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ApiManagement/service/{serviceName}/products/{productId}/policies/{policyId}'
      request_url = @base_url || @client.base_url
      options = {
          middlewares: [[MsRest::RetryPolicyMiddleware, times: 3, retry: 0.02], [:cookie_jar]],
          path_params: {'resourceGroupName' => resource_group_name,'serviceName' => service_name,'subscriptionId' => @client.subscription_id,'productId' => product_id,'policyId' => policy_id},
          query_params: {'api-version' => @client.api_version},
          headers: request_headers.merge(custom_headers || {}),
          base_url: request_url
      }
      promise = @client.make_request_async(:head, path_template, options)
      promise = promise.then do |result|
        http_response = result.response
        status_code = http_response.status
        response_content = http_response.body
        unless status_code == 200
          error_model = JSON.load(response_content)
          fail MsRest::HttpOperationError.new(result.request, http_response, error_model)
        end
        result.request_id = http_response['x-ms-request-id'] unless http_response['x-ms-request-id'].nil?
        result.correlation_request_id = http_response['x-ms-correlation-request-id'] unless http_response['x-ms-correlation-request-id'].nil?
        result.client_request_id = http_response['x-ms-client-request-id'] unless http_response['x-ms-client-request-id'].nil?
        result
      end
      promise.execute
    end
    #
    # Get the policy configuration at the Product level.
    #
    # @param resource_group_name [String] The name of the resource group.
    # @param service_name [String] The name of the API Management service.
    # @param product_id [String] Product identifier. Must be unique in the current
    # API Management service instance.
    # @param custom_headers [Hash{String => String}] A hash of custom headers that
    # will be added to the HTTP request.
    #
    # @return [PolicyContract] operation results.
    #
    def get(resource_group_name, service_name, product_id, custom_headers:nil)
      response = get_async(resource_group_name, service_name, product_id, custom_headers:custom_headers).value!
      response.body unless response.nil?
    end
    #
    # Get the policy configuration at the Product level.
    #
    # @param resource_group_name [String] The name of the resource group.
    # @param service_name [String] The name of the API Management service.
    # @param product_id [String] Product identifier. Must be unique in the current
    # API Management service instance.
    # @param custom_headers [Hash{String => String}] A hash of custom headers that
    # will be added to the HTTP request.
    #
    # @return [MsRestAzure::AzureOperationResponse] HTTP response information.
    #
    def get_with_http_info(resource_group_name, service_name, product_id, custom_headers:nil)
      get_async(resource_group_name, service_name, product_id, custom_headers:custom_headers).value!
    end
    #
    # Get the policy configuration at the Product level.
    #
    # @param resource_group_name [String] The name of the resource group.
    # @param service_name [String] The name of the API Management service.
    # @param product_id [String] Product identifier. Must be unique in the current
    # API Management service instance.
    # @param [Hash{String => String}] A hash of custom headers that will be added
    # to the HTTP request.
    #
    # @return [Concurrent::Promise] Promise object which holds the HTTP response.
    #
    def get_async(resource_group_name, service_name, product_id, custom_headers:nil)
      fail ArgumentError, 'resource_group_name is nil' if resource_group_name.nil?
      fail ArgumentError, 'service_name is nil' if service_name.nil?
      fail ArgumentError, "'service_name' should satisfy the constraint - 'MaxLength': '50'" if !service_name.nil? && service_name.length > 50
      fail ArgumentError, "'service_name' should satisfy the constraint - 'MinLength': '1'" if !service_name.nil? && service_name.length < 1
      fail ArgumentError, "'service_name' should satisfy the constraint - 'Pattern': '^[a-zA-Z](?:[a-zA-Z0-9-]*[a-zA-Z0-9])?$'" if !service_name.nil? && service_name.match(Regexp.new('^^[a-zA-Z](?:[a-zA-Z0-9-]*[a-zA-Z0-9])?$$')).nil?
      fail ArgumentError, '@client.api_version is nil' if @client.api_version.nil?
      fail ArgumentError, '@client.subscription_id is nil' if @client.subscription_id.nil?
      fail ArgumentError, 'product_id is nil' if product_id.nil?
      fail ArgumentError, "'product_id' should satisfy the constraint - 'MaxLength': '80'" if !product_id.nil? && product_id.length > 80
      fail ArgumentError, "'product_id' should satisfy the constraint - 'MinLength': '1'" if !product_id.nil? && product_id.length < 1
      fail ArgumentError, "'product_id' should satisfy the constraint - 'Pattern': '(^[\w]+$)|(^[\w][\w\-]+[\w]$)'" if !product_id.nil? && product_id.match(Regexp.new('^(^[\w]+$)|(^[\w][\w\-]+[\w]$)$')).nil?
      policy_id = 'policy'
      request_headers = {}
      request_headers['Content-Type'] = 'application/json; charset=utf-8'
      # Set Headers
      request_headers['x-ms-client-request-id'] = SecureRandom.uuid
      request_headers['accept-language'] = @client.accept_language unless @client.accept_language.nil?
      path_template = 'subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ApiManagement/service/{serviceName}/products/{productId}/policies/{policyId}'
      request_url = @base_url || @client.base_url
      options = {
          middlewares: [[MsRest::RetryPolicyMiddleware, times: 3, retry: 0.02], [:cookie_jar]],
          path_params: {'resourceGroupName' => resource_group_name,'serviceName' => service_name,'subscriptionId' => @client.subscription_id,'productId' => product_id,'policyId' => policy_id},
          query_params: {'api-version' => @client.api_version},
          headers: request_headers.merge(custom_headers || {}),
          base_url: request_url
      }
      promise = @client.make_request_async(:get, path_template, options)
      promise = promise.then do |result|
        http_response = result.response
        status_code = http_response.status
        response_content = http_response.body
        unless status_code == 200
          error_model = JSON.load(response_content)
          fail MsRest::HttpOperationError.new(result.request, http_response, error_model)
        end
        result.request_id = http_response['x-ms-request-id'] unless http_response['x-ms-request-id'].nil?
        result.correlation_request_id = http_response['x-ms-correlation-request-id'] unless http_response['x-ms-correlation-request-id'].nil?
        result.client_request_id = http_response['x-ms-client-request-id'] unless http_response['x-ms-client-request-id'].nil?
        # Deserialize Response
        if status_code == 200
          begin
            parsed_response = response_content.to_s.empty? ? nil : JSON.load(response_content)
            result_mapper = Azure::ApiManagement::Mgmt::V2018_01_01::Models::PolicyContract.mapper()
            result.body = @client.deserialize(result_mapper, parsed_response)
          rescue Exception => e
            fail MsRest::DeserializationError.new('Error occurred in deserializing the response', e.message, e.backtrace, result)
          end
        end
        result
      end
      promise.execute
    end
    #
    # Creates or updates policy configuration for the Product.
    #
    # @param resource_group_name [String] The name of the resource group.
    # @param service_name [String] The name of the API Management service.
    # @param product_id [String] Product identifier. Must be unique in the current
    # API Management service instance.
    # @param parameters [PolicyContract] The policy contents to apply.
    # @param if_match [String] ETag of the Entity. Not required when creating an
    # entity, but required when updating an entity.
    # @param custom_headers [Hash{String => String}] A hash of custom headers that
    # will be added to the HTTP request.
    #
    # @return [PolicyContract] operation results.
    #
    def create_or_update(resource_group_name, service_name, product_id, parameters, if_match:nil, custom_headers:nil)
      response = create_or_update_async(resource_group_name, service_name, product_id, parameters, if_match:if_match, custom_headers:custom_headers).value!
      response.body unless response.nil?
    end
    #
    # Creates or updates policy configuration for the Product.
    #
    # @param resource_group_name [String] The name of the resource group.
    # @param service_name [String] The name of the API Management service.
    # @param product_id [String] Product identifier. Must be unique in the current
    # API Management service instance.
    # @param parameters [PolicyContract] The policy contents to apply.
    # @param if_match [String] ETag of the Entity. Not required when creating an
    # entity, but required when updating an entity.
    # @param custom_headers [Hash{String => String}] A hash of custom headers that
    # will be added to the HTTP request.
    #
    # @return [MsRestAzure::AzureOperationResponse] HTTP response information.
    #
    def create_or_update_with_http_info(resource_group_name, service_name, product_id, parameters, if_match:nil, custom_headers:nil)
      create_or_update_async(resource_group_name, service_name, product_id, parameters, if_match:if_match, custom_headers:custom_headers).value!
    end
    #
    # Creates or updates policy configuration for the Product.
    #
    # @param resource_group_name [String] The name of the resource group.
    # @param service_name [String] The name of the API Management service.
    # @param product_id [String] Product identifier. Must be unique in the current
    # API Management service instance.
    # @param parameters [PolicyContract] The policy contents to apply.
    # @param if_match [String] ETag of the Entity. Not required when creating an
    # entity, but required when updating an entity.
    # @param [Hash{String => String}] A hash of custom headers that will be added
    # to the HTTP request.
    #
    # @return [Concurrent::Promise] Promise object which holds the HTTP response.
    #
    def create_or_update_async(resource_group_name, service_name, product_id, parameters, if_match:nil, custom_headers:nil)
      fail ArgumentError, 'resource_group_name is nil' if resource_group_name.nil?
      fail ArgumentError, 'service_name is nil' if service_name.nil?
      fail ArgumentError, "'service_name' should satisfy the constraint - 'MaxLength': '50'" if !service_name.nil? && service_name.length > 50
      fail ArgumentError, "'service_name' should satisfy the constraint - 'MinLength': '1'" if !service_name.nil? && service_name.length < 1
      fail ArgumentError, "'service_name' should satisfy the constraint - 'Pattern': '^[a-zA-Z](?:[a-zA-Z0-9-]*[a-zA-Z0-9])?$'" if !service_name.nil? && service_name.match(Regexp.new('^^[a-zA-Z](?:[a-zA-Z0-9-]*[a-zA-Z0-9])?$$')).nil?
      fail ArgumentError, 'product_id is nil' if product_id.nil?
      fail ArgumentError, "'product_id' should satisfy the constraint - 'MaxLength': '80'" if !product_id.nil? && product_id.length > 80
      fail ArgumentError, "'product_id' should satisfy the constraint - 'MinLength': '1'" if !product_id.nil? && product_id.length < 1
      fail ArgumentError, "'product_id' should satisfy the constraint - 'Pattern': '(^[\w]+$)|(^[\w][\w\-]+[\w]$)'" if !product_id.nil? && product_id.match(Regexp.new('^(^[\w]+$)|(^[\w][\w\-]+[\w]$)$')).nil?
      policy_id = 'policy'
      fail ArgumentError, 'parameters is nil' if parameters.nil?
      fail ArgumentError, '@client.api_version is nil' if @client.api_version.nil?
      fail ArgumentError, '@client.subscription_id is nil' if @client.subscription_id.nil?
      request_headers = {}
      request_headers['Content-Type'] = 'application/json; charset=utf-8'
      # Set Headers
      request_headers['x-ms-client-request-id'] = SecureRandom.uuid
      request_headers['If-Match'] = if_match unless if_match.nil?
      request_headers['accept-language'] = @client.accept_language unless @client.accept_language.nil?
      # Serialize Request
      request_mapper = Azure::ApiManagement::Mgmt::V2018_01_01::Models::PolicyContract.mapper()
      request_content = @client.serialize(request_mapper,  parameters)
      request_content = request_content != nil ? JSON.generate(request_content, quirks_mode: true) : nil
      path_template = 'subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ApiManagement/service/{serviceName}/products/{productId}/policies/{policyId}'
      request_url = @base_url || @client.base_url
      options = {
          middlewares: [[MsRest::RetryPolicyMiddleware, times: 3, retry: 0.02], [:cookie_jar]],
          path_params: {'resourceGroupName' => resource_group_name,'serviceName' => service_name,'productId' => product_id,'policyId' => policy_id,'subscriptionId' => @client.subscription_id},
          query_params: {'api-version' => @client.api_version},
          body: request_content,
          headers: request_headers.merge(custom_headers || {}),
          base_url: request_url
      }
      promise = @client.make_request_async(:put, path_template, options)
      promise = promise.then do |result|
        http_response = result.response
        status_code = http_response.status
        response_content = http_response.body
        # 201 when the policy is created, 200 when an existing one is replaced.
        unless status_code == 201 || status_code == 200
          error_model = JSON.load(response_content)
          fail MsRest::HttpOperationError.new(result.request, http_response, error_model)
        end
        result.request_id = http_response['x-ms-request-id'] unless http_response['x-ms-request-id'].nil?
        result.correlation_request_id = http_response['x-ms-correlation-request-id'] unless http_response['x-ms-correlation-request-id'].nil?
        result.client_request_id = http_response['x-ms-client-request-id'] unless http_response['x-ms-client-request-id'].nil?
        # Deserialize Response
        if status_code == 201
          begin
            parsed_response = response_content.to_s.empty? ? nil : JSON.load(response_content)
            result_mapper = Azure::ApiManagement::Mgmt::V2018_01_01::Models::PolicyContract.mapper()
            result.body = @client.deserialize(result_mapper, parsed_response)
          rescue Exception => e
            fail MsRest::DeserializationError.new('Error occurred in deserializing the response', e.message, e.backtrace, result)
          end
        end
        # Deserialize Response
        if status_code == 200
          begin
            parsed_response = response_content.to_s.empty? ? nil : JSON.load(response_content)
            result_mapper = Azure::ApiManagement::Mgmt::V2018_01_01::Models::PolicyContract.mapper()
            result.body = @client.deserialize(result_mapper, parsed_response)
          rescue Exception => e
            fail MsRest::DeserializationError.new('Error occurred in deserializing the response', e.message, e.backtrace, result)
          end
        end
        result
      end
      promise.execute
    end
    #
    # Deletes the policy configuration at the Product.
    #
    # @param resource_group_name [String] The name of the resource group.
    # @param service_name [String] The name of the API Management service.
    # @param product_id [String] Product identifier. Must be unique in the current
    # API Management service instance.
    # @param if_match [String] ETag of the Entity. ETag should match the current
    # entity state from the header response of the GET request or it should be *
    # for unconditional update.
    # @param custom_headers [Hash{String => String}] A hash of custom headers that
    # will be added to the HTTP request.
    #
    #
    def delete(resource_group_name, service_name, product_id, if_match, custom_headers:nil)
      response = delete_async(resource_group_name, service_name, product_id, if_match, custom_headers:custom_headers).value!
      nil
    end
    #
    # Deletes the policy configuration at the Product.
    #
    # @param resource_group_name [String] The name of the resource group.
    # @param service_name [String] The name of the API Management service.
    # @param product_id [String] Product identifier. Must be unique in the current
    # API Management service instance.
    # @param if_match [String] ETag of the Entity. ETag should match the current
    # entity state from the header response of the GET request or it should be *
    # for unconditional update.
    # @param custom_headers [Hash{String => String}] A hash of custom headers that
    # will be added to the HTTP request.
    #
    # @return [MsRestAzure::AzureOperationResponse] HTTP response information.
    #
    def delete_with_http_info(resource_group_name, service_name, product_id, if_match, custom_headers:nil)
      delete_async(resource_group_name, service_name, product_id, if_match, custom_headers:custom_headers).value!
    end
    #
    # Deletes the policy configuration at the Product.
    #
    # @param resource_group_name [String] The name of the resource group.
    # @param service_name [String] The name of the API Management service.
    # @param product_id [String] Product identifier. Must be unique in the current
    # API Management service instance.
    # @param if_match [String] ETag of the Entity. ETag should match the current
    # entity state from the header response of the GET request or it should be *
    # for unconditional update.
    # @param [Hash{String => String}] A hash of custom headers that will be added
    # to the HTTP request.
    #
    # @return [Concurrent::Promise] Promise object which holds the HTTP response.
    #
    def delete_async(resource_group_name, service_name, product_id, if_match, custom_headers:nil)
      fail ArgumentError, 'resource_group_name is nil' if resource_group_name.nil?
      fail ArgumentError, 'service_name is nil' if service_name.nil?
      fail ArgumentError, "'service_name' should satisfy the constraint - 'MaxLength': '50'" if !service_name.nil? && service_name.length > 50
      fail ArgumentError, "'service_name' should satisfy the constraint - 'MinLength': '1'" if !service_name.nil? && service_name.length < 1
      fail ArgumentError, "'service_name' should satisfy the constraint - 'Pattern': '^[a-zA-Z](?:[a-zA-Z0-9-]*[a-zA-Z0-9])?$'" if !service_name.nil? && service_name.match(Regexp.new('^^[a-zA-Z](?:[a-zA-Z0-9-]*[a-zA-Z0-9])?$$')).nil?
      fail ArgumentError, 'product_id is nil' if product_id.nil?
      fail ArgumentError, "'product_id' should satisfy the constraint - 'MaxLength': '80'" if !product_id.nil? && product_id.length > 80
      fail ArgumentError, "'product_id' should satisfy the constraint - 'MinLength': '1'" if !product_id.nil? && product_id.length < 1
      fail ArgumentError, "'product_id' should satisfy the constraint - 'Pattern': '(^[\w]+$)|(^[\w][\w\-]+[\w]$)'" if !product_id.nil? && product_id.match(Regexp.new('^(^[\w]+$)|(^[\w][\w\-]+[\w]$)$')).nil?
      policy_id = 'policy'
      fail ArgumentError, 'if_match is nil' if if_match.nil?
      fail ArgumentError, '@client.api_version is nil' if @client.api_version.nil?
      fail ArgumentError, '@client.subscription_id is nil' if @client.subscription_id.nil?
      request_headers = {}
      request_headers['Content-Type'] = 'application/json; charset=utf-8'
      # Set Headers
      request_headers['x-ms-client-request-id'] = SecureRandom.uuid
      request_headers['If-Match'] = if_match unless if_match.nil?
      request_headers['accept-language'] = @client.accept_language unless @client.accept_language.nil?
      path_template = 'subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ApiManagement/service/{serviceName}/products/{productId}/policies/{policyId}'
      request_url = @base_url || @client.base_url
      options = {
          middlewares: [[MsRest::RetryPolicyMiddleware, times: 3, retry: 0.02], [:cookie_jar]],
          path_params: {'resourceGroupName' => resource_group_name,'serviceName' => service_name,'productId' => product_id,'policyId' => policy_id,'subscriptionId' => @client.subscription_id},
          query_params: {'api-version' => @client.api_version},
          headers: request_headers.merge(custom_headers || {}),
          base_url: request_url
      }
      promise = @client.make_request_async(:delete, path_template, options)
      promise = promise.then do |result|
        http_response = result.response
        status_code = http_response.status
        response_content = http_response.body
        unless status_code == 200 || status_code == 204
          error_model = JSON.load(response_content)
          fail MsRest::HttpOperationError.new(result.request, http_response, error_model)
        end
        result.request_id = http_response['x-ms-request-id'] unless http_response['x-ms-request-id'].nil?
        result.correlation_request_id = http_response['x-ms-correlation-request-id'] unless http_response['x-ms-correlation-request-id'].nil?
        result.client_request_id = http_response['x-ms-client-request-id'] unless http_response['x-ms-client-request-id'].nil?
        result
      end
      promise.execute
    end
  end
end
| 55.049828 | 233 | 0.700272 |
084642dbe05c19697e70a7e1e7bdbf36a4d478c0 | 1,267 | # encoding: utf-8
require 'jeweler'
# Defines the gem-packaging tasks (rake version, gemspec, build, install)
# from the specification configured below.
Jeweler::Tasks.new do |gem|
# gem is a Gem::Specification... see http://docs.rubygems.org/read/chapter/20 for more options
gem.name = "coupler"
gem.homepage = "http://github.com/coupler/coupler"
gem.license = "MIT"
gem.summary = %Q{Coupler is a desktop application for linking datasets together}
gem.description = %Q{Coupler is a (JRuby) desktop application designed to link datasets together}
gem.email = "[email protected]"
gem.authors = ["Jeremy Stephens"]
gem.platform = 'java'
# dependencies defined in Gemfile
end
# Adds "rake release" support that pushes the built gem to rubygems.org.
Jeweler::RubygemsDotOrgTasks.new
# NOTE(review): the test, rcov, and rdoc task definitions below are
# deliberately commented out; kept for reference only.
#require 'rake/testtask'
#Rake::TestTask.new(:test) do |test|
#test.libs << 'lib' << 'test'
#test.pattern = 'test/**/test_*.rb'
#test.verbose = true
#end
#require 'rcov/rcovtask'
#Rcov::RcovTask.new do |test|
#test.libs << 'test'
#test.pattern = 'test/**/test_*.rb'
#test.verbose = true
#test.rcov_opts << '--exclude "gems/*"'
#end
#task :default => :test
#require 'rake/rdoctask'
#Rake::RDocTask.new do |rdoc|
#version = File.exist?('VERSION') ? File.read('VERSION') : ""
#rdoc.rdoc_dir = 'rdoc'
#rdoc.title = "coupler #{version}"
#rdoc.rdoc_files.include('README*')
#rdoc.rdoc_files.include('lib/**/*.rb')
#end
| 28.795455 | 99 | 0.681137 |
33c669befb1477b27ae144a1c3068176f9ae09cd | 4,741 | require 'spec_helper'
# Specs for Icalendar::Calendar: property accessors, component builder
# methods, serialization via #to_ical, and the #publish helper.
describe Icalendar::Calendar do
  context 'values' do
    let(:property) { 'my-value' }
    # Each standard calendar property has a generated getter/setter pair.
    %w(prodid version calscale ip_method).each do |prop|
      it "##{prop} sets and gets" do
        subject.send("#{prop}=", property)
        expect(subject.send prop).to eq property
      end
    end
    # Unknown x_-prefixed properties are stored as multi-value (arrays).
    it "sets and gets custom properties" do
      subject.x_custom_prop = property
      expect(subject.x_custom_prop).to eq [property]
    end
    it 'can set params on a property' do
      subject.prodid.ical_params = {'hello' => 'world'}
      expect(subject.prodid.value).to eq 'icalendar-ruby'
      expect(subject.prodid.ical_params).to eq('hello' => 'world')
    end
    context "required values" do
      it 'is not valid when prodid is not set' do
        subject.prodid = nil
        expect(subject).to_not be_valid
      end
      it 'is not valid when version is not set' do
        subject.version = nil
        expect(subject).to_not be_valid
      end
      it 'is valid when both prodid and version are set' do
        subject.version = '2.0'
        subject.prodid = 'my-product'
        expect(subject).to be_valid
      end
      # Defaults supplied by the Calendar constructor already satisfy validity.
      it 'is valid by default' do
        expect(subject).to be_valid
      end
    end
  end
  context 'components' do
    let(:ical_component) { double 'Component', name: 'event', :'parent=' => nil }
    # Each builder method creates, yields, and returns a component of its kind.
    %w(event todo journal freebusy timezone).each do |component|
      it "##{component} adds a new component" do
        expect(subject.send "#{component}").to be_a_kind_of Icalendar::Component
      end
      it "##{component} passes a component to a block to build parts" do
        expect { |b| subject.send("#{component}", &b) }.to yield_with_args Icalendar::Component
      end
      it "##{component} can be passed in" do
        expect { |b| subject.send("#{component}", ical_component, &b) }.to yield_with_args ical_component
        expect(subject.send "#{component}", ical_component).to eq ical_component
      end
    end
    it "adds event to events list" do
      subject.event ical_component
      expect(subject.events).to eq [ical_component]
    end
    describe '#add_event' do
      it 'delegates to non add_ version' do
        expect(subject).to receive(:event).with(ical_component)
        subject.add_event ical_component
      end
    end
    describe '#find_event' do
      let(:ical_component) { double 'Component', uid: 'uid' }
      let(:other_component) { double 'Component', uid: 'other' }
      before(:each) do
        subject.events << other_component
        subject.events << ical_component
      end
      it 'finds by uid' do
        expect(subject.find_event 'uid').to eq ical_component
      end
    end
    describe '#find_timezone' do
      let(:ical_timezone) { double 'Timezone', tzid: 'Eastern' }
      let(:other_timezone) { double 'Timezone', tzid: 'Pacific' }
      before(:each) do
        subject.timezones << other_timezone
        subject.timezones << ical_timezone
      end
      it 'finds by tzid' do
        expect(subject.find_timezone 'Eastern').to eq ical_timezone
      end
    end
    it "adds reference to parent" do
      e = subject.event
      expect(e.parent).to eq subject
    end
    it 'can be added with add_x_ for custom components' do
      expect(subject.add_x_custom_component).to be_a_kind_of Icalendar::Component
      expect { |b| subject.add_x_custom_component(&b) }.to yield_with_args Icalendar::Component
      expect(subject.add_x_custom_component ical_component).to eq ical_component
    end
  end
  describe '#to_ical' do
    before(:each) do
      # Freeze time so DTSTAMP in the serialized output is deterministic.
      Timecop.freeze DateTime.new(2013, 12, 26, 5, 0, 0, '+0000')
      subject.event do |e|
        e.summary = 'An event'
        e.dtstart = "20140101T000000Z"
        e.dtend = "20140101T050000Z"
        e.geo = [-1.2, -2.1]
      end
      subject.freebusy do |f|
        f.dtstart = "20140102T080000Z"
        f.dtend = "20140102T100000Z"
        f.comment = 'Busy'
      end
    end
    after(:each) do
      Timecop.return
    end
    it 'outputs properties and components' do
      expected_no_uid = <<-EOICAL.gsub("\n", "\r\n")
BEGIN:VCALENDAR
VERSION:2.0
PRODID:icalendar-ruby
CALSCALE:GREGORIAN
BEGIN:VEVENT
DTSTAMP:20131226T050000Z
DTSTART:20140101T000000Z
DTEND:20140101T050000Z
GEO:-1.2;-2.1
SUMMARY:An event
END:VEVENT
BEGIN:VFREEBUSY
DTSTAMP:20131226T050000Z
DTSTART:20140102T080000Z
DTEND:20140102T100000Z
COMMENT:Busy
END:VFREEBUSY
END:VCALENDAR
      EOICAL
      # UID is random per run, so it (and its folded continuation lines) is
      # stripped before comparing against the expected serialization.
      expect(subject.to_ical.gsub(/^UID:.*\r\n(?: .*\r\n)*/, '')).to eq expected_no_uid
    end
  end
  describe '#publish' do
    it 'sets ip_method to "PUBLISH"' do
      subject.publish
      expect(subject.ip_method).to eq 'PUBLISH'
    end
  end
end
| 28.220238 | 105 | 0.64691 |
91eb5508a1efbeb4ea2c0f67c2bae603a8e8c46d | 1,028 | data = File.readlines("#{__dir__}/data.txt").map(&:strip)
# Resolve the row portion of a boarding pass via binary space partitioning.
#
# s - Array of "F"/"B" step characters; "F" keeps the lower half of the
#     remaining candidates, "B" keeps the upper half.
# r - Array of candidate row numbers (initially 0...128).
#
# Returns the single remaining row number once all steps are applied.
def check_row(s, r)
  # No steps left: exactly one candidate remains.
  return r.first if s.empty?
  # Split the candidates into two *non-overlapping* halves. The previous
  # implementation kept the midpoint element in both halves and only
  # converged by accident of the floor division; take/drop partitions cleanly.
  half = r.size / 2
  rest = s.drop(1)
  case s.first
  when "F" then check_row(rest, r.take(half))
  when "B" then check_row(rest, r.drop(half))
  end
end
# Resolve the column portion of a boarding pass via binary space partitioning.
#
# s - Array of "L"/"R" step characters; "L" keeps the lower half of the
#     remaining candidates, "R" keeps the upper half.
# r - Array of candidate column numbers (initially 0...8).
#
# Returns the single remaining column number once all steps are applied.
def check_col(s, r)
  # No steps left: exactly one candidate remains.
  return r.first if s.empty?
  # Non-overlapping halves; see check_row for why this replaces the old
  # midpoint-in-both-halves partition.
  half = r.size / 2
  rest = s.drop(1)
  case s.first
  when "L" then check_col(rest, r.take(half))
  when "R" then check_col(rest, r.drop(half))
  end
end
# Compute the seat ID for a 10-character boarding pass such as "FBFBBFFRLR".
#
# The pass is really a 10-bit binary number: F/L are 0 bits, B/R are 1 bits,
# and (row * 8) + column equals exactly that number, so the recursive
# partitioning search (which rebuilt candidate arrays at every step) is
# unnecessary. This runs in O(length) with no intermediate arrays.
def check_seat_id(p)
  p.tr("FBLR", "0101").to_i(2)
end
# Highest seat ID present across all boarding passes in dt.
def check_highest(dt)
  dt.map { |pass| check_seat_id(pass) }.max
end
# Part 1 answer: print the highest seat ID found in the input file.
puts check_highest data
| 21.87234 | 57 | 0.564202 |
61b0da88f910aad578622a29000e5aae660cc4a5 | 601 | # frozen_string_literal: true
# Top-level namespace for the acts_as_messageable gem. The component classes
# are loaded lazily via autoload, and rails_api selects the ORM adapter that
# matches the major version of Rails currently loaded.
module ActsAsMessageable
  autoload :Model, 'acts_as_messageable/model'
  autoload :Scopes, 'acts_as_messageable/scopes'
  autoload :Message, 'acts_as_messageable/message'
  autoload :Relation, 'acts_as_messageable/relation'
  autoload :Rails3, 'acts_as_messageable/rails3'
  autoload :Rails4, 'acts_as_messageable/rails4'
  autoload :Rails6, 'acts_as_messageable/rails6'

  # Pick the adapter module for the running Rails major version:
  # 6+ -> Rails6, 4..5 -> Rails4, anything older -> Rails3.
  def self.rails_api
    major = Rails::VERSION::MAJOR
    if major >= 6
      Rails6
    elsif major >= 4
      Rails4
    else
      Rails3
    end
  end
end
require 'acts_as_messageable/railtie'
| 25.041667 | 52 | 0.745424 |
f8b4eebb7a8ac8d5123eeb4210c62a07f7f1f820 | 5,783 | # encoding: utf-8
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
module Azure::Resources::Mgmt::V2018_02_01
  #
  # A service client - single point of access to the REST API.
  #
  class ResourceManagementClient < MsRestAzure::AzureServiceClient
    include MsRestAzure
    include MsRestAzure::Serialization
    # @return [String] the base URI of the service.
    attr_accessor :base_url
    # @return Credentials needed for the client to connect to Azure.
    attr_reader :credentials
    # @return [String] The ID of the target subscription.
    attr_accessor :subscription_id
    # @return [String] The API version to use for this operation.
    attr_reader :api_version
    # @return [String] The preferred language for the response.
    attr_accessor :accept_language
    # @return [Integer] The retry timeout in seconds for Long Running
    # Operations. Default value is 30.
    attr_accessor :long_running_operation_retry_timeout
    # @return [Boolean] Whether a unique x-ms-client-request-id should be
    # generated. When set to true a unique x-ms-client-request-id value is
    # generated and included in each request. Default is true.
    attr_accessor :generate_client_request_id
    # @return [Deployments] deployments
    attr_reader :deployments
    # @return [Providers] providers
    attr_reader :providers
    # @return [Resources] resources
    attr_reader :resources
    # @return [ResourceGroups] resource_groups
    attr_reader :resource_groups
    # @return [Tags] tags
    attr_reader :tags
    # @return [DeploymentOperations] deployment_operations
    attr_reader :deployment_operations
    #
    # Creates initializes a new instance of the ResourceManagementClient class.
    # @param credentials [MsRest::ServiceClientCredentials] credentials to authorize HTTP requests made by the service client.
    # @param base_url [String] the base URI of the service.
    # @param options [Array] filters to be applied to the HTTP requests.
    #
    def initialize(credentials = nil, base_url = nil, options = nil)
      super(credentials, options)
      @base_url = base_url || 'https://management.azure.com'
      fail ArgumentError, 'invalid type of credentials input parameter' unless credentials.is_a?(MsRest::ServiceClientCredentials) unless credentials.nil?
      @credentials = credentials
      # Operation group accessors are created eagerly and share this client.
      @deployments = Deployments.new(self)
      @providers = Providers.new(self)
      @resources = Resources.new(self)
      @resource_groups = ResourceGroups.new(self)
      @tags = Tags.new(self)
      @deployment_operations = DeploymentOperations.new(self)
      @api_version = '2018-02-01'
      @accept_language = 'en-US'
      @long_running_operation_retry_timeout = 30
      @generate_client_request_id = true
      add_telemetry
    end
    #
    # Makes a request and returns the body of the response.
    # @param method [Symbol] with any of the following values :get, :put, :post, :patch, :delete.
    # @param path [String] the path, relative to {base_url}.
    # @param options [Hash{String=>String}] specifying any request options like :body.
    # @return [Hash{String=>String}] containing the body of the response.
    # Example:
    #
    # request_content = "{'location':'westus','tags':{'tag1':'val1','tag2':'val2'}}"
    # path = "/path"
    # options = {
    # body: request_content,
    # query_params: {'api-version' => '2016-02-01'}
    # }
    # result = @client.make_request(:put, path, options)
    #
    def make_request(method, path, options = {})
      result = make_request_with_http_info(method, path, options)
      result.body unless result.nil?
    end
    #
    # Makes a request and returns the operation response.
    # @param method [Symbol] with any of the following values :get, :put, :post, :patch, :delete.
    # @param path [String] the path, relative to {base_url}.
    # @param options [Hash{String=>String}] specifying any request options like :body.
    # @return [MsRestAzure::AzureOperationResponse] Operation response containing the request, response and status.
    #
    def make_request_with_http_info(method, path, options = {})
      result = make_request_async(method, path, options).value!
      # An empty HTTP body deserializes to nil rather than raising from JSON.load.
      result.body = result.response.body.to_s.empty? ? nil : JSON.load(result.response.body)
      result
    end
    #
    # Makes a request asynchronously.
    # @param method [Symbol] with any of the following values :get, :put, :post, :patch, :delete.
    # @param path [String] the path, relative to {base_url}.
    # @param options [Hash{String=>String}] specifying any request options like :body.
    # @return [Concurrent::Promise] Promise object which holds the HTTP response.
    #
    def make_request_async(method, path, options = {})
      fail ArgumentError, 'method is nil' if method.nil?
      fail ArgumentError, 'path is nil' if path.nil?
      request_url = options[:base_url] || @base_url
      # Allow callers to override the default Content-Type header per request.
      if(!options[:headers].nil? && !options[:headers]['Content-Type'].nil?)
        @request_headers['Content-Type'] = options[:headers]['Content-Type']
      end
      request_headers = @request_headers
      request_headers.merge!({'accept-language' => @accept_language}) unless @accept_language.nil?
      options.merge!({headers: request_headers.merge(options[:headers] || {})})
      options.merge!({credentials: @credentials}) unless @credentials.nil?
      super(request_url, method, path, options)
    end
    private
    #
    # Adds telemetry information.
    # Appends "azure_mgmt_resources/<gem version>" to the User-Agent header.
    #
    def add_telemetry
      sdk_information = 'azure_mgmt_resources'
      sdk_information = "#{sdk_information}/0.18.2"
      add_user_agent_information(sdk_information)
    end
  end
end
| 38.046053 | 154 | 0.693412 |
01d309bc3cb25c61354d379b05a4ee63de3bf3a6 | 169 | module Erlash
class ArrayFormatter < TemplateFormatter
def format
elems = object.map{ |e| format_elem(e) }
"[ #{elems.join(", ")} ]"
end
end
end
| 18.777778 | 46 | 0.60355 |
91e6b64338fd5999ca4cc3f50f328db1168a6024 | 112 | class UpdateJobs < ActiveRecord::Migration[5.2]
  # Adds an optional free-text location column to the jobs table.
  def change
    # Reversible: ActiveRecord infers remove_column on rollback.
    add_column :jobs, :location, :string
  end
end
| 18.666667 | 47 | 0.723214 |
03dbd0a24c96e93efdd80e8fea25d95137b89379 | 342 | Merb.logger.info("Loaded DEVELOPMENT Environment...")
Merb::Config.use { |c|
c[:exception_details] = true
c[:reload_templates] = true
c[:reload_classes] = true
c[:reload_time] = 0.5
c[:log_auto_flush ] = true
c[:ignore_tampered_cookies] = true
c[:log_level] = :debug
}
# Use the :test_send delivery method so outgoing mail is captured for
# inspection instead of actually being delivered in development.
class Merb::Mailer
  self.delivery_method = :test_send
end
bb02be7c7e256bde8e7654bbdeaa2851c11f996a | 2,201 | RSpec.describe Guide, type: :model do
let(:id) { 'the_test_guide' }
let(:locale) { 'en' }
let(:content_type) { :govspeak }
let(:content) { '/examples/test.md' }
let(:label) { 'Tested' }
let(:concise_label) { 'Test' }
let(:description) { 'A test description' }
let(:tags) { [] }
let(:metadata) do
{
label: label,
concise_label: concise_label,
description: description,
tags: tags
}
end
subject(:guide) do
described_class.new(id, locale, content: content, content_type: content_type, metadata: metadata)
end
%i(id locale content content_type).each do |attr|
describe "##{attr}" do
it "returns the initialised #{attr}" do
expect(guide.public_send(attr)).to eq(public_send(attr))
end
end
end
%i(label concise_label description tags).each do |attr|
describe "##{attr}" do
it "returns the initialised #{attr}" do
expect(guide.metadata.public_send(attr)).to eq(public_send(attr))
end
end
end
describe '#slug' do
it 'returns a slug based on the id' do
expect(guide.slug).to eq('the-test-guide')
end
end
describe '#==' do
it 'considers two guides with the same ID as equal' do
expect(described_class.new('foo', locale)).to eq(described_class.new('foo', locale))
end
end
describe '#welsh?' do
context 'when tagged with "welsh"' do
let(:tags) { %w(welsh) }
it { is_expected.to be_welsh }
end
end
describe '#related_to_appointments?' do
context 'when tagged with "appointments"' do
let(:tags) { %w(appointments) }
it { is_expected.to be_related_to_appointments }
end
end
describe '#related_to_booking?' do
context 'when tagged with "booking"' do
let(:tags) { %w(booking) }
it { is_expected.to be_related_to_booking }
end
end
describe '#option?' do
context 'when tagged with "option"' do
let(:tags) { %w(option) }
it { is_expected.to be_option }
end
end
describe '#call_to_action?' do
context 'when tagged with "call-to-action"' do
let(:tags) { %w(call-to-action) }
it { is_expected.to be_call_to_action }
end
end
end
| 23.923913 | 101 | 0.629259 |
385a0efcc4ac233ed8098c0e848b7f7b982e19f1 | 4,594 | #-- copyright
# OpenProject is an open source project management software.
# Copyright (C) 2012-2020 the OpenProject GmbH
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License version 3.
#
# OpenProject is a fork of ChiliProject, which is a fork of Redmine. The copyright follows:
# Copyright (C) 2006-2017 Jean-Philippe Lang
# Copyright (C) 2010-2013 the ChiliProject Team
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# See docs/COPYRIGHT.rdoc for more details.
#++
require File.expand_path(File.dirname(__FILE__) + '/../../spec_helper.rb')
describe OpenProject::JournalFormatter::Attachment do
include ApplicationHelper
include ActionView::Helpers::TagHelper
# WARNING: the order of the modules is important to ensure that url_for of
# ActionController::UrlWriter is called and not the one of ActionView::Helpers::UrlHelper
include ActionView::Helpers::UrlHelper
include Rails.application.routes.url_helpers
def self.default_url_options
{ only_path: true }
end
let(:klass) { OpenProject::JournalFormatter::Attachment }
let(:instance) { klass.new(journal) }
let(:id) { 1 }
let(:journal) do
OpenStruct.new(id: id)
end
let(:user) { FactoryBot.create(:user) }
let(:attachment) do
FactoryBot.create(:attachment,
author: user)
end
let(:key) { "attachments_#{attachment.id}" }
describe '#render' do
describe 'WITH the first value beeing nil, and the second an id as string' do
it 'adds an attachment added text' do
link = "#{Setting.protocol}://#{Setting.host_name}/api/v3/attachments/#{attachment.id}/content"
expect(instance.render(key, [nil, attachment.id.to_s]))
.to eq(I18n.t(:text_journal_added,
label: "<strong>#{I18n.t(:'activerecord.models.attachment')}</strong>",
value: "<a href=\"#{link}\">#{attachment.filename}</a>"))
end
context 'WITH a relative_url_root' do
before do
allow(OpenProject::Configuration)
.to receive(:rails_relative_url_root)
.and_return('/blubs')
end
it 'adds an attachment added text' do
link = "#{Setting.protocol}://#{Setting.host_name}/blubs/api/v3/attachments/#{attachment.id}/content"
expect(instance.render(key, [nil, attachment.id.to_s]))
.to eq(I18n.t(:text_journal_added,
label: "<strong>#{I18n.t(:'activerecord.models.attachment')}</strong>",
value: "<a href=\"#{link}\">#{attachment.filename}</a>"))
end
end
end
describe 'WITH the first value beeing an id as string, and the second nil' do
let(:expected) do
I18n.t(:text_journal_deleted,
label: "<strong>#{I18n.t(:'activerecord.models.attachment')}</strong>",
old: "<strike><i title=\"#{attachment.id}\">#{attachment.id}</i></strike>")
end
it { expect(instance.render(key, [attachment.id.to_s, nil])).to eq(expected) }
end
describe "WITH the first value beeing nil, and the second an id as a string
WITH specifying not to output html" do
let(:expected) do
I18n.t(:text_journal_added,
label: I18n.t(:'activerecord.models.attachment'),
value: attachment.id)
end
it { expect(instance.render(key, [nil, attachment.id.to_s], no_html: true)).to eq(expected) }
end
describe "WITH the first value beeing an id as string, and the second nil,
WITH specifying not to output html" do
let(:expected) do
I18n.t(:text_journal_deleted,
label: I18n.t(:'activerecord.models.attachment'),
old: attachment.id)
end
it { expect(instance.render(key, [attachment.id.to_s, nil], no_html: true)).to eq(expected) }
end
end
end
| 39.603448 | 111 | 0.659338 |
e9e56b257c74882ab225bd3240c22ed4a8ac1f7c | 341 | # frozen_string_literal: true
# Frequency table of the characters in word, e.g. "aba" => {"a"=>2, "b"=>1}.
# The returned hash carries a default of 0, so absent characters look up as 0.
public def chars_count(word)
  word.chars.each_with_object(Hash.new(0)) { |char, counts| counts[char] += 1 }
end

# All entries of words that are anagrams of word: same length and identical
# character frequencies. Length is compared first as a cheap pre-filter before
# building the candidate's frequency table.
public def anagrams(word, words)
  target_counts = chars_count(word)
  target_length = word.length
  words.select { |candidate| candidate.length == target_length && chars_count(candidate) == target_counts }
end
| 22.733333 | 77 | 0.695015 |
ac59a25cc1090e6620489e6e7f77823434e1ec55 | 35,675 | # frozen_string_literal: true
# encoding: utf-8
# Copyright (C) 2014-2020 MongoDB Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
require 'mongo/cluster/topology'
require 'mongo/cluster/reapers/socket_reaper'
require 'mongo/cluster/reapers/cursor_reaper'
require 'mongo/cluster/periodic_executor'
module Mongo
# Represents a group of servers on the server side, either as a
# single server, a replica set, or a single or multiple mongos.
#
# @since 2.0.0
class Cluster
extend Forwardable
include Monitoring::Publishable
include Event::Subscriber
include Loggable
include ClusterTime::Consumer
# The default number of legacy read retries.
#
# @since 2.1.1
MAX_READ_RETRIES = 1
# The default number of legacy write retries.
#
# @since 2.4.2
MAX_WRITE_RETRIES = 1
# The default read retry interval, in seconds, when using legacy read
# retries.
#
# @since 2.1.1
READ_RETRY_INTERVAL = 5
# How often an idle primary writes a no-op to the oplog.
#
# @since 2.4.0
IDLE_WRITE_PERIOD_SECONDS = 10
# The cluster time key in responses from mongos servers.
#
# @since 2.5.0
# @deprecated
CLUSTER_TIME = 'clusterTime'.freeze
# Instantiate the new cluster.
#
# @api private
#
# @example Instantiate the cluster.
# Mongo::Cluster.new(["127.0.0.1:27017"], monitoring)
#
# @note Cluster should never be directly instantiated outside of a Client.
#
# @note When connecting to a mongodb+srv:// URI, the client expands such a
# URI into a list of servers and passes that list to the Cluster
# constructor. When connecting to a standalone mongod, the Cluster
# constructor receives the corresponding address as an array of one string.
#
# @param [ Array<String> ] seeds The addresses of the configured servers
# @param [ Monitoring ] monitoring The monitoring.
# @param [ Hash ] options Options. Client constructor forwards its
# options to Cluster constructor, although Cluster recognizes
# only a subset of the options recognized by Client.
#
# @option options [ true | false ] :direct_connection Whether to connect
# directly to the specified seed, bypassing topology discovery. Exactly
# one seed must be provided.
# @option options [ Symbol ] :connect Deprecated - use :direct_connection
# option instead of this option. The connection method to use. This
# forces the cluster to behave in the specified way instead of
# auto-discovering. One of :direct, :replica_set, :sharded
# @option options [ Symbol ] :replica_set The name of the replica set to
# connect to. Servers not in this replica set will be ignored.
# @option options [ true | false ] :scan Whether to scan all seeds
# in constructor. The default in driver version 2.x is to do so;
# driver version 3.x will not scan seeds in constructor. Opt in to the
# new behavior by setting this option to false. *Note:* setting
# this option to nil enables scanning seeds in constructor in driver
# version 2.x. Driver version 3.x will recognize this option but
# will ignore it and will never scan seeds in the constructor.
# @option options [ true | false ] :monitoring_io For internal driver
# use only. Set to false to prevent SDAM-related I/O from being
# done by this cluster or servers under it. Note: setting this option
# to false will make the cluster non-functional. It is intended for
# use in tests which manually invoke SDAM state transitions.
# @option options [ true | false ] :cleanup For internal driver use only.
# Set to false to prevent endSessions command being sent to the server
# to clean up server sessions when the cluster is disconnected, and to
# to not start the periodic executor. If :monitoring_io is false,
# :cleanup automatically defaults to false as well.
# @option options [ Float ] :heartbeat_frequency The interval, in seconds,
# for the server monitor to refresh its description via hello.
# @option options [ Hash ] :resolv_options For internal driver use only.
# Options to pass through to Resolv::DNS constructor for SRV lookups.
# @option options [ Hash ] :server_api The requested server API version.
# This hash can have the following items:
# - *:version* -- string
# - *:strict* -- boolean
# - *:deprecation_errors* -- boolean
#
# @since 2.0.0
def initialize(seeds, monitoring, options = Options::Redacted.new)
if seeds.nil?
raise ArgumentError, 'Seeds cannot be nil'
end
options = options.dup
if options[:monitoring_io] == false && !options.key?(:cleanup)
options[:cleanup] = false
end
@options = options.freeze
# @update_lock covers @servers, @connecting, @connected, @topology and
# @sessions_supported. Generally instance variables that do not have a
# designated for them lock should only be modified under the update lock.
# Note that topology change is locked by @update_lock and not by
# @sdam_flow_lock.
@update_lock = Mutex.new
@servers = []
@monitoring = monitoring
@event_listeners = Event::Listeners.new
@app_metadata = Server::AppMetadata.new(@options.merge(purpose: :application))
@monitor_app_metadata = Server::Monitor::AppMetadata.new(@options.merge(purpose: :monitor))
@push_monitor_app_metadata = Server::Monitor::AppMetadata.new(@options.merge(purpose: :push_monitor))
@cluster_time_lock = Mutex.new
@cluster_time = nil
@srv_monitor_lock = Mutex.new
@srv_monitor = nil
@server_selection_semaphore = Semaphore.new
@topology = Topology.initial(self, monitoring, options)
# State change lock is similar to the sdam flow lock, but is designed
# to serialize state changes initated by consumers of Cluster
# (e.g. application connecting or disconnecting the cluster), so that
# e.g. an application calling disconnect-connect-disconnect rapidly
# does not put the cluster into an inconsistent state.
# Monitoring updates performed internally by the driver do not take
# the state change lock.
@state_change_lock = Mutex.new
# @sdam_flow_lock covers just the sdam flow. Note it does not apply
# to @topology replacements which are done under @update_lock.
@sdam_flow_lock = Mutex.new
Session::SessionPool.create(self)
# The opening topology is always unknown with no servers.
# https://github.com/mongodb/specifications/pull/388
opening_topology = Topology::Unknown.new(options, monitoring, self)
publish_sdam_event(
Monitoring::TOPOLOGY_OPENING,
Monitoring::Event::TopologyOpening.new(opening_topology)
)
@seeds = seeds = seeds.uniq
servers = seeds.map do |seed|
# Server opening events must be sent after topology change events.
# Therefore separate server addition, done here before topoolgy change
# event is published, from starting to monitor the server which is
# done later.
add(seed, monitor: false)
end
if seeds.size >= 1
# Recreate the topology to get the current server list into it
@topology = topology.class.new(topology.options, topology.monitoring, self)
publish_sdam_event(
Monitoring::TOPOLOGY_CHANGED,
Monitoring::Event::TopologyChanged.new(opening_topology, @topology)
)
end
if options[:monitoring_io] == false
# Omit periodic executor construction, because without servers
# no commands can be sent to the cluster and there shouldn't ever
# be anything that needs to be cleaned up.
#
# Omit monitoring individual servers and the legacy single round of
# of SDAM on the main thread, as it would race with tests that mock
# SDAM responses.
@connecting = @connected = false
return
end
# Update instance variables prior to starting monitoring threads.
@connecting = false
@connected = true
if options[:cleanup] != false
@cursor_reaper = CursorReaper.new(self)
@socket_reaper = SocketReaper.new(self)
@periodic_executor = PeriodicExecutor.new([
@cursor_reaper, @socket_reaper,
], options)
@periodic_executor.run!
end
unless load_balanced?
# Need to record start time prior to starting monitoring
start_monotime = Utils.monotonic_time
servers.each do |server|
server.start_monitoring
end
if options[:scan] != false
server_selection_timeout = options[:server_selection_timeout] || ServerSelector::SERVER_SELECTION_TIMEOUT
# The server selection timeout can be very short especially in
# tests, when the client waits for a synchronous scan before
# starting server selection. Limiting the scan to server selection time
# then aborts the scan before it can process even local servers.
# Therefore, allow at least 3 seconds for the scan here.
if server_selection_timeout < 3
server_selection_timeout = 3
end
deadline = start_monotime + server_selection_timeout
# Wait for the first scan of each server to complete, for
# backwards compatibility.
# If any servers are discovered during this SDAM round we are going to
# wait for these servers to also be queried, and so on, up to the
# server selection timeout or the 3 second minimum.
loop do
# Ensure we do not try to read the servers list while SDAM is running
servers = @sdam_flow_lock.synchronize do
servers_list.dup
end
if servers.all? { |server| server.last_scan_monotime && server.last_scan_monotime >= start_monotime }
break
end
if (time_remaining = deadline - Utils.monotonic_time) <= 0
break
end
log_debug("Waiting for up to #{'%.2f' % time_remaining} seconds for servers to be scanned: #{summary}")
# Since the semaphore may have been signaled between us checking
# the servers list above and the wait call below, we should not
# wait for the full remaining time - wait for up to 1 second, then
# recheck the state.
begin
server_selection_semaphore.wait([time_remaining, 1].min)
rescue ::Timeout::Error
# nothing
end
end
end
start_stop_srv_monitor
end
end
# Create a cluster for the provided client, for use when we don't want the
# client's original cluster instance to be the same.
#
# @api private
#
# @example Create a cluster for the client.
# Cluster.create(client)
#
# @param [ Client ] client The client to create on.
#
# @return [ Cluster ] The cluster.
#
# @since 2.0.0
def self.create(client)
cluster = Cluster.new(
client.cluster.addresses.map(&:to_s),
Monitoring.new,
client.cluster_options,
)
client.instance_variable_set(:@cluster, cluster)
end
# @return [ Hash ] The options hash.
attr_reader :options
# @return [ Monitoring ] monitoring The monitoring.
attr_reader :monitoring
# @return [ Object ] The cluster topology.
attr_reader :topology
# @return [ Mongo::Server::AppMetadata ] The application metadata, used for
# connection handshakes.
#
# @since 2.4.0
attr_reader :app_metadata
# @api private
attr_reader :monitor_app_metadata
# @api private
attr_reader :push_monitor_app_metadata
# @return [ Array<String> ] The addresses of seed servers. Contains
# addresses that were given to Cluster when it was instantiated, not
# current addresses that the cluster is using as a result of SDAM.
#
# @since 2.7.0
# @api private
attr_reader :seeds
# @private
#
# @since 2.5.1
attr_reader :session_pool
def_delegators :topology, :replica_set?, :replica_set_name, :sharded?,
:single?, :unknown?
# Returns whether the cluster is configured to be in the load-balanced
# topology.
#
# @return [ true | false ] Whether the topology is load-balanced.
def load_balanced?
topology.is_a?(Topology::LoadBalanced)
end
[:register_cursor, :schedule_kill_cursor, :unregister_cursor].each do |m|
define_method(m) do |*args|
if options[:cleanup] != false
@cursor_reaper.send(m, *args)
end
end
end
# @api private
attr_reader :srv_monitor
# Get the maximum number of times the client can retry a read operation
# when using legacy read retries.
#
# @note max_read_retries should be retrieved from the Client instance,
# not from a Cluster instance, because clusters may be shared between
# clients with different values for max read retries.
#
# @example Get the max read retries.
# cluster.max_read_retries
#
# @return [ Integer ] The maximum number of retries.
#
# @since 2.1.1
# @deprecated
def max_read_retries
options[:max_read_retries] || MAX_READ_RETRIES
end
# Get the interval, in seconds, in which read retries when using legacy
# read retries.
#
# @note read_retry_interval should be retrieved from the Client instance,
# not from a Cluster instance, because clusters may be shared between
# clients with different values for the read retry interval.
#
# @example Get the read retry interval.
# cluster.read_retry_interval
#
# @return [ Float ] The interval.
#
# @since 2.1.1
# @deprecated
def read_retry_interval
options[:read_retry_interval] || READ_RETRY_INTERVAL
end
# Get the refresh interval for the server. This will be defined via an
# option or will default to 10.
#
# @return [ Float ] The heartbeat interval, in seconds.
#
# @since 2.10.0
# @api private
def heartbeat_interval
options[:heartbeat_frequency] || Server::Monitor::DEFAULT_HEARTBEAT_INTERVAL
end
# Whether the cluster object is in the process of connecting to its cluster.
#
# @return [ true|false ] Whether the cluster is connecting.
#
# @api private
def connecting?
@update_lock.synchronize do
!!@connecting
end
end
# Whether the cluster object is connected to its cluster.
#
# @return [ true|false ] Whether the cluster is connected.
#
# @api private
# @since 2.7.0
def connected?
@update_lock.synchronize do
!!@connected
end
end
# Get a list of server candidates from the cluster that can have operations
# executed on them.
#
# @example Get the server candidates for an operation.
# cluster.servers
#
# @return [ Array<Server> ] The candidate servers.
#
# @since 2.0.0
def servers
topology.servers(servers_list)
end
# The addresses in the cluster.
#
# @example Get the addresses in the cluster.
# cluster.addresses
#
# @return [ Array<Mongo::Address> ] The addresses.
#
# @since 2.0.6
def addresses
servers_list.map(&:address)
end
# The logical session timeout value in minutes.
#
# @example Get the logical session timeout in minutes.
# cluster.logical_session_timeout
#
# @return [ Integer, nil ] The logical session timeout.
#
# @since 2.5.0
def_delegators :topology, :logical_session_timeout
# Get the nicer formatted string for use in inspection.
#
# @example Inspect the cluster.
# cluster.inspect
#
# @return [ String ] The cluster inspection.
#
# @since 2.0.0
def inspect
"#<Mongo::Cluster:0x#{object_id} servers=#{servers} topology=#{topology.summary}>"
end
# @note This method is experimental and subject to change.
#
# @api experimental
# @since 2.7.0
def summary
"#<Cluster " +
"topology=#{topology.summary} "+
"servers=[#{servers_list.map(&:summary).join(',')}]>"
end
# @api private
attr_reader :server_selection_semaphore
# Closes the cluster.
#
# @note Applications should call Client#close to disconnect from
# the cluster rather than calling this method. This method is for
# internal driver use only.
#
# Disconnects all servers in the cluster, publishing appropriate SDAM
# events in the process. Stops SRV monitoring if it is active.
# Marks the cluster disconnected.
#
# @return [ true ] Always true.
#
# @since 2.1.0
def disconnect!
@state_change_lock.synchronize do
unless connecting? || connected?
return true
end
if options[:cleanup] != false
session_pool.end_sessions
@periodic_executor.stop!
end
@srv_monitor_lock.synchronize do
if @srv_monitor
@srv_monitor.stop!
end
end
@servers.each do |server|
if server.connected?
server.disconnect!
publish_sdam_event(
Monitoring::SERVER_CLOSED,
Monitoring::Event::ServerClosed.new(server.address, topology)
)
end
end
publish_sdam_event(
Monitoring::TOPOLOGY_CLOSED,
Monitoring::Event::TopologyClosed.new(topology)
)
@update_lock.synchronize do
@connecting = @connected = false
end
end
true
end
# Reconnect all servers.
#
# @example Reconnect the cluster's servers.
# cluster.reconnect!
#
# @return [ true ] Always true.
#
# @since 2.1.0
# @deprecated Use Client#reconnect to reconnect to the cluster instead of
# calling this method. This method does not send SDAM events.
def reconnect!
@state_change_lock.synchronize do
@update_lock.synchronize do
@connecting = true
end
scan!
servers.each do |server|
server.reconnect!
end
@periodic_executor.restart!
@srv_monitor_lock.synchronize do
if @srv_monitor
@srv_monitor.run!
end
end
@update_lock.synchronize do
@connecting = false
@connected = true
end
end
end
# Force a scan of all known servers in the cluster.
#
# If the sync parameter is true which is the default, the scan is
# performed synchronously in the thread which called this method.
# Each server in the cluster is checked sequentially. If there are
# many servers in the cluster or they are slow to respond, this
# can be a long running operation.
#
# If the sync parameter is false, this method instructs all server
# monitor threads to perform an immediate scan and returns without
# waiting for scan results.
#
# @note In both synchronous and asynchronous scans, each monitor
# thread maintains a minimum interval between scans, meaning
# calling this method may not initiate a scan on a particular server
# the very next instant.
#
# @example Force a full cluster scan.
# cluster.scan!
#
# @return [ true ] Always true.
#
# @since 2.0.0
def scan!(sync=true)
if sync
servers_list.each do |server|
if server.monitor
server.monitor.scan!
else
log_warn("Synchronous scan requested on cluster #{summary} but server #{server} has no monitor")
end
end
else
servers_list.each do |server|
server.scan_semaphore.signal
end
end
true
end
# Runs SDAM flow on the cluster.
#
# This method can be invoked to process a new server description returned
# by the server on a monitoring or non-monitoring connection, and also
# by the driver when it marks a server unknown as a result of a (network)
# error.
#
# @param [ Server::Description ] previous_desc Previous server description.
# @param [ Server::Description ] updated_desc The changed description.
# @param [ Hash ] options Options.
#
# @option options [ true | false ] :keep_connection_pool Usually when the
# new server description is unknown, the connection pool on the
# respective server is cleared. Set this option to true to keep the
# existing connection pool (required when handling not master errors
# on 4.2+ servers).
# @option aptions [ true | false ] :awaited Whether the updated description
# was a result of processing an awaited hello.
# @option options [ Object ] :service_id Change state for the specified
# service id only.
#
# @api private
def run_sdam_flow(previous_desc, updated_desc, options = {})
  # Load-balanced topologies do not run SDAM. The only processing needed
  # is clearing connection pools when a server becomes unknown (signalled
  # by an empty description config), unless the caller opted out.
  if load_balanced?
    if updated_desc.config.empty?
      unless options[:keep_connection_pool]
        servers_list.each do |server|
          # TODO should service id be taken out of updated_desc?
          # We could also assert that
          # options[:service_id] == updated_desc.service_id
          server.clear_connection_pool(service_id: options[:service_id])
        end
      end
    end
    return
  end

  @sdam_flow_lock.synchronize do
    flow = SdamFlow.new(self, previous_desc, updated_desc,
      awaited: options[:awaited])
    flow.server_description_changed
    # SDAM flow may alter the updated description - grab the final
    # version for the purposes of broadcasting if a server is available
    updated_desc = flow.updated_desc
    unless options[:keep_connection_pool]
      if flow.became_unknown?
        servers_list.each do |server|
          if server.address == updated_desc.address
            server.clear_connection_pool
          end
        end
      end
    end
    # The SRV monitor may need to start or stop depending on the new
    # topology; done while still holding the SDAM flow lock.
    start_stop_srv_monitor
  end

  # Some updated descriptions, e.g. a mismatched me one, result in the
  # server whose description we are processing being removed from
  # the topology. When this happens, the server's monitoring thread gets
  # killed. As a result, any code after the flow invocation may not run
  # a particular monitor instance, hence there should generally not be
  # any code in this method past the flow invocation.
  #
  # However, this broadcast call can be here because if the monitoring
  # thread got killed the server should have been closed and no client
  # should be currently waiting for it, thus not signaling the semaphore
  # shouldn't cause any problems.
  unless updated_desc.unknown?
    server_selection_semaphore.broadcast
  end
end
# Sets the list of servers to the addresses in the provided list of address
# strings.
#
# This method is called by the SRV monitor after receiving new DNS records
# for the monitored hostname.
#
# Removes servers in the cluster whose addresses are not in the passed
# list of server addresses, and adds servers for any addresses in the
# argument which are not already in the cluster.
#
# @param [ Array<String> ] server_address_strs List of server addresses
# to sync the cluster servers to.
#
# @api private
def set_server_list(server_address_strs)
  @sdam_flow_lock.synchronize do
    # First pass: add servers for addresses we are not yet tracking.
    server_address_strs.each do |address_str|
      already_known = servers_list.any? do |server|
        server.address.seed == address_str
      end
      add(address_str) unless already_known
    end

    # Second pass: drop servers whose seed address is no longer listed.
    servers_list.each do |server|
      still_wanted = server_address_strs.any? do |address_str|
        server.address.seed == address_str
      end
      remove(server.address.seed) unless still_wanted
    end
  end
end
# Determine if this cluster of servers is equal to another object. Checks the
# servers currently in the cluster, not what was configured.
#
# @param [ Object ] other The object to compare to.
#
# @return [ true, false ] If the objects are equal.
#
# @since 2.0.0
def ==(other)
  other.is_a?(Cluster) &&
    addresses == other.addresses &&
    options == other.options
end
# Determine if the cluster would select a readable server for the
# provided read preference.
#
# @example Is a readable server present?
#   topology.has_readable_server?(server_selector)
#
# @param [ ServerSelector | nil ] server_selector The server
#   selector. When nil, the topology decides what counts as readable.
#
# @return [ true, false ] If a readable server is present.
#
# @since 2.4.0
def has_readable_server?(server_selector = nil)
  # Delegates the decision to the current topology.
  topology.has_readable_server?(self, server_selector)
end
# Determine if the cluster would select a writable server.
#
# @example Is a writable server present?
#   topology.has_writable_server?
#
# @return [ true, false ] If a writable server is present.
#
# @since 2.4.0
def has_writable_server?
  # Delegates the decision to the current topology.
  topology.has_writable_server?(self)
end
# Get the next primary server we can send an operation to.
#
# @example Get the next primary server.
#   cluster.next_primary
#
# @param [ true, false ] ping Whether to ping the server before selection.
#   Deprecated and ignored.
# @param [ Session | nil ] session Optional session to take into account
#   for mongos pinning.
#
# @return [ Mongo::Server ] A primary server.
#
# @since 2.0.0
def next_primary(ping = nil, session = nil)
  # The ping argument is intentionally unused; selection is fully
  # delegated to the primary server selector.
  ServerSelector.primary.select_server(self, nil, session)
end
# Get the connection pool for the server.
#
# @example Get the connection pool.
#   cluster.pool(server)
#
# @param [ Server ] server The server.
#
# @return [ Server::ConnectionPool ] The connection pool.
#
# @since 2.2.0
# @deprecated Use Server#pool directly instead; this is a plain delegation.
def pool(server)
  server.pool
end
# Update the max cluster time seen in a response.
#
# @example Update the cluster time.
#   cluster.update_cluster_time(result)
#
# @param [ Operation::Result ] result The operation result containing the cluster time.
#
# @return [ Object ] The cluster time.
#
# @since 2.5.0
def update_cluster_time(result)
  cluster_time_doc = result.cluster_time
  return unless cluster_time_doc

  @cluster_time_lock.synchronize do
    advance_cluster_time(cluster_time_doc)
  end
end
# Add a server to the cluster with the provided address. Useful in
# auto-discovery of new servers when an existing server executes a hello
# and potentially non-configured servers were included.
#
# @example Add the server for the address to the cluster.
#   cluster.add('127.0.0.1:27018')
#
# @param [ String ] host The address of the server to add.
#
# @option options [ Boolean ] :monitor For internal driver use only:
#   whether to monitor the newly added server.
#
# @return [ Server | nil ] The newly added server. Returns nil when the
#   address was already present (either in the unsynchronized pre-check
#   or in the locked re-check below).
#
# @since 2.0.0
def add(host, add_options=nil)
  address = Address.new(host, options)
  if !addresses.include?(address)
    opts = options.merge(monitor: false)
    # Load-balanced mode is propagated down to the new server.
    if Topology::LoadBalanced === topology
      opts[:load_balancer] = true
    end
    server = Server.new(address, self, @monitoring, event_listeners, opts)
    @update_lock.synchronize do
      # Need to recheck whether server is present in @servers, because
      # the previous check was not under a lock.
      # Since we are under the update lock here, we cannot call servers_list.
      return if @servers.map(&:address).include?(address)
      @servers.push(server)
    end
    # Monitoring starts outside the update lock, and only when the caller
    # did not explicitly pass monitor: false.
    if add_options.nil? || add_options[:monitor] != false
      server.start_monitoring
    end
    server
  end
end
# Remove the server from the cluster for the provided address, if it
# exists.
#
# @example Remove the server from the cluster.
# server.remove('127.0.0.1:27017')
#
# @param [ String ] host The host/port or socket address.
# @param [ true | false ] disconnect Whether to disconnect the servers
# being removed. For internal driver use only.
#
# @return [ Array<Server> | true | false ] If disconnect is any value other
# than false, including nil, returns whether any servers were removed.
# If disconnect is false, returns an array of servers that were removed
# (and should be disconnected by the caller).
#
# @note The return value of this method is not part of the driver's
# public API.
#
# @since 2.0.0
def remove(host, disconnect: true)
  address = Address.new(host)
  removed_servers = []

  @update_lock.synchronize do
    @servers.delete_if do |server|
      matched = server.address == address
      removed_servers << server if matched
      matched
    end
  end

  # Any value other than literal false (including nil) means the removed
  # servers should be disconnected here; evaluate the flag once.
  should_disconnect = disconnect != false
  if should_disconnect
    removed_servers.each do |server|
      disconnect_server_if_connected(server)
    end
    removed_servers.any?
  else
    removed_servers
  end
end
# @api private
#
# Replaces the cluster's topology object and publishes a
# TOPOLOGY_CHANGED event. Also refreshes the cached session-support
# flag when the new topology has data bearing servers.
def update_topology(new_topology)
  old_topology = nil
  @update_lock.synchronize do
    old_topology = topology
    @topology = new_topology
  end

  # If new topology has data bearing servers, we know for sure whether
  # sessions are supported - update our cached value.
  # If new topology has no data bearing servers, leave the old value
  # as it is and sessions_supported? method will perform server selection
  # to try to determine session support accurately, falling back to the
  # last known value.
  if topology.data_bearing_servers?
    sessions_supported = !!topology.logical_session_timeout
    @update_lock.synchronize do
      @sessions_supported = sessions_supported
    end
  end

  publish_sdam_event(
    Monitoring::TOPOLOGY_CHANGED,
    Monitoring::Event::TopologyChanged.new(old_topology, topology)
  )
end
# @api private
#
# Returns a snapshot (shallow copy) of the current server list, taken
# under @update_lock so callers can iterate without holding the lock.
def servers_list
  @update_lock.synchronize do
    @servers.dup
  end
end
# @api private
#
# Disconnects the given server if it is currently connected, publishing
# a SERVER_CLOSED SDAM event for it. No-op for disconnected servers.
def disconnect_server_if_connected(server)
  if server.connected?
    server.disconnect!
    publish_sdam_event(
      Monitoring::SERVER_CLOSED,
      Monitoring::Event::ServerClosed.new(server.address, topology)
    )
  end
end
# Raises Error::SessionsNotAvailable if the deployment that the driver
# is connected to does not support sessions.
#
# Session support may change over time, for example due to servers in the
# deployment being upgraded or downgraded. If the client is currently not
# connected to any data bearing servers, this method considers the state
# of session support as of when the client was last connected to at
# least one server. If the client has never connected to any servers,
# the deployment is considered to not support sessions.
#
# @api private
def validate_session_support!
  # Load-balanced deployments skip the check entirely.
  if topology.is_a?(Topology::LoadBalanced)
    return
  end

  # Fast path: if data bearing servers are already known, the topology's
  # logical session timeout answers the question definitively.
  # Note the lock nesting order: state change lock, then SDAM flow lock.
  @state_change_lock.synchronize do
    @sdam_flow_lock.synchronize do
      if topology.data_bearing_servers?
        unless topology.logical_session_timeout
          raise_sessions_not_supported
        end
      end
    end
  end

  # No data bearing servers known - perform server selection to try to
  # get a response from at least one of them, to return an accurate
  # assessment of whether sessions are currently supported.
  begin
    ServerSelector.get(mode: :primary_preferred).select_server(self)
    @state_change_lock.synchronize do
      @sdam_flow_lock.synchronize do
        unless topology.logical_session_timeout
          raise_sessions_not_supported
        end
      end
    end
  rescue Error::NoServerAvailable
    # We haven't been able to contact any servers - use last known
    # value for session support.
    @state_change_lock.synchronize do
      @sdam_flow_lock.synchronize do
        @update_lock.synchronize do
          unless @sessions_supported
            raise_sessions_not_supported
          end
        end
      end
    end
  end
end
private
# @api private
#
# Starts or stops the SRV monitor to match the current topology. The
# monitor is needed only for SRV-seeded clients whose topology is
# sharded or still unknown; otherwise a running monitor is stopped.
def start_stop_srv_monitor
  # SRV URI is either always given or not for a given cluster, if one
  # wasn't given we shouldn't ever have an SRV monitor to manage.
  return unless options[:srv_uri]

  if topology.is_a?(Topology::Sharded) || topology.is_a?(Topology::Unknown)
    # Start SRV monitor
    @srv_monitor_lock.synchronize do
      unless @srv_monitor
        monitor_options = Utils.shallow_symbolize_keys(options.merge(
          timeout: options[:connect_timeout] || Server::CONNECT_TIMEOUT))
        # The original code also assigned an unused local (_srv_monitor);
        # only the instance variable is needed.
        @srv_monitor = Srv::Monitor.new(self, **monitor_options)
      end
      @srv_monitor.run!
    end
  else
    # Stop SRV monitor if running. This path is taken when the client
    # is given an SRV URI to a standalone/replica set; when the topology
    # is discovered, since it's not a sharded cluster, the SRV monitor
    # needs to be stopped.
    @srv_monitor_lock.synchronize do
      if @srv_monitor
        @srv_monitor.stop!
      end
    end
  end
end
# Raises Error::SessionsNotSupported with a message identifying which
# known data bearing servers lack a logical session timeout (or stating
# that no data bearing servers are known at all).
def raise_sessions_not_supported
  # Intentionally using @servers instead of +servers+ here because we
  # are supposed to be already holding the @update_lock and we cannot
  # recursively acquire it again.
  offending_servers = @servers.select do |server|
    server.description.data_bearing? && server.logical_session_timeout.nil?
  end
  reason = if offending_servers.empty?
    "There are no known data bearing servers (current seeds: #{@servers.map(&:address).map(&:seed).join(', ')})"
  else
    "The following servers have null logical session timeout: #{offending_servers.map(&:address).map(&:seed).join(', ')}"
  end
  msg = "The deployment that the driver is connected to does not support sessions: #{reason}"
  raise Error::SessionsNotSupported, msg
end
end
end
require 'mongo/cluster/sdam_flow'
| 34.907045 | 125 | 0.649362 |
e9cfb444c181b2961a6310e660baf4010473cf38 | 3,049 | require 'rails_helper'
# Specs for the Assessment model: associations, the column-name
# constants, the .analyse class methods and the derived status readers.
describe Assessment do
  subject { build(:assessment) }

  it('belongs to Fund') { assoc(:fund, :belongs_to) }
  it('belongs to Proposal') { assoc(:proposal, :belongs_to) }
  it('belongs to Recipient') { assoc(:recipient, :belongs_to) }

  it { is_expected.to be_valid }

  it 'requires associations' do
    %i[fund proposal recipient].each do |assoc|
      subject.send("#{assoc}=", nil)
      is_expected.not_to be_valid
    end
  end

  context do
    let(:eligibility) do
      %i[
        eligibility_amount
        eligibility_location
        eligibility_proposal_categories
        eligibility_quiz
        eligibility_recipient_categories
      ]
    end

    let(:suitability) do
      %i[
        suitability_quiz
      ]
    end

    it 'ELIGIBILITY_COLUMNS correct' do
      expect(Assessment::ELIGIBILITY_COLUMNS).to match_array(eligibility)
    end

    it 'SUITABILITY_COLUMNS correct' do
      expect(Assessment::SUITABILITY_COLUMNS).to match_array(suitability)
    end

    it 'PERMITTED_COLUMNS correct' do
      # PERMITTED_COLUMNS is the union of the two column sets plus the
      # bookkeeping attributes below.
      misc = %i[
        eligibility_quiz_failing
        eligibility_status
        suitability_quiz_failing
        suitability_status
        fund_version
        reasons
      ]
      permitted = eligibility + suitability + misc
      expect(Assessment::PERMITTED_COLUMNS).to match_array(permitted)
    end
  end

  context do
    let(:collection) { create(:funder_with_funds) }
    let(:proposal) { create(:proposal) }

    # .analyse builds assessments in memory without persisting any.
    it 'self.analyse' do
      Assessment.analyse(collection.funds, proposal)
      expect(Assessment.count).to eq(0)
    end

    # .analyse_and_update! persists one assessment per fund.
    it 'self.analyse_and_update!' do
      Assessment.analyse_and_update!(collection.funds, proposal)
      expect(Assessment.count).to eq(2)
    end

    it 'self.analyse_and_update! duplicate keys' # pending example
  end

  it '#attributes keys symbolized' do
    subject.attributes.keys.each { |k| expect(k).to be_a(Symbol) }
  end

  it '#banner' do
    expect(subject.banner).to be_a(Banner)
  end

  it '#eligibility_status unset before_validation' do
    assessment = Assessment.new
    expect(assessment.eligibility_status).to eq(nil)
  end

  context '#eligibility_status' do
    context 'incomplete' do
      it { expect(subject.eligibility_status).to eq(INCOMPLETE) }
    end

    context 'ineligible' do
      subject { build(:ineligible) }
      it { expect(subject.eligibility_status).to eq(INELIGIBLE) }
    end

    context 'eligible' do
      subject { build(:eligible) }
      it { expect(subject.eligibility_status).to eq(ELIGIBLE) }
    end
  end

  context '#suitability_status' do
    context 'incomplete' do
      it { expect(subject.suitability_status).to eq('unclear') }
    end

    context 'ineligible' do
      subject { build(:ineligible) }
      it { expect(subject.suitability_status).to eq('avoid') }
    end

    context 'eligible' do
      subject { build(:eligible) }
      it { expect(subject.suitability_status).to eq('approach') }
    end
  end

  it '#ratings' do
    expect(subject.ratings).to all(be_a(Rating))
  end
end
| 24.198413 | 73 | 0.667104 |
b9c3f2d4309eb4f016a41df64b2b93ba9c18676b | 858 | # frozen_string_literal: true
require 'httparty'
module Zoom
  # HTTP client for the Zoom REST API v2. Endpoint implementations are
  # mixed in from the Actions modules below.
  class Client
    include HTTParty
    include Actions::Account
    include Actions::Data
    include Actions::Group
    include Actions::M323Device
    include Actions::User
    include Actions::Meeting
    include Actions::Metrics
    include Actions::Phone
    include Actions::Webinar
    include Actions::Report
    include Actions::Recording
    include Actions::IM::Chat
    include Actions::IM::Group

    base_uri 'https://api.zoom.us/v2'
    headers 'Accept' => 'application/json'
    headers 'Content-Type' => 'application/json'

    # Headers for manually built requests, including the bearer token.
    # NOTE(review): #access_token is not defined in this class - it is
    # presumably provided by the JWT/OAuth clients required at the bottom
    # of this file; confirm before relying on it elsewhere.
    def request_headers
      {
        'Accept' => 'application/json',
        'Content-Type' => 'application/json',
        'Authorization' => "Bearer #{access_token}"
      }
    end
  end
end
require 'zoom/clients/jwt'
require 'zoom/clients/oauth'
| 22.578947 | 51 | 0.671329 |
1cc66df59c3792b9079eced1fb4862cb1ebfc540 | 731 | require 'test_helper'
class MessageTest < Minitest::Test
  # Reads a PEM-encoded private key from the given path.
  def private_key(path)
    OpenSSL::PKey.read File.read(path)
  end

  # Reads a PEM-encoded X.509 certificate from the given path.
  def public_key(path)
    OpenSSL::X509::Certificate.new File.read(path)
  end

  # A fixture AS2 message should verify against the client certificate
  # and decode to the expected attachment payload and filename.
  def test_base64_encoded_messages
    server_key = private_key('test/certificates/server.key')
    server_crt = public_key('test/certificates/server.crt')
    client_crt = public_key('test/certificates/client.crt')

    message = As2::Message.new(File.read('test/fixtures/hello_message'), server_key, server_crt)
    assert message.valid_signature?(client_crt), "Invalid signature"
    assert_equal "Hello World\n", message.attachment.body.decoded
    assert_equal "hello_world.txt", message.attachment.filename
  end
end
| 33.227273 | 96 | 0.756498 |
9173144426f1a65f5c9e7bb7a27c497783317a4c | 402 | class SearchCountController < ApplicationController
# Renders the current usage count as JSON.
#
# Prefers the cached UsageCount value, refreshing it asynchronously when
# the cache is missing or about to expire. Responds with {count: -1}
# when anything goes wrong.
def new
  if UsageCount.exists?
    count = UsageCount.get
    # Refresh the cache shortly before it expires.
    SetUsageCountWorker.perform_async if UsageCount.ttl < 5.minutes
  else
    logger.warn 'UsageCount is not found'
    count = User.all.size
    SetUsageCountWorker.perform_async
  end
  render json: {count: count}
rescue => e
  # Keep the endpoint resilient, but leave a trace instead of silently
  # swallowing the exception (the original captured `e` without using it).
  logger.warn "SearchCountController#new failed: #{e.class}: #{e.message}"
  render json: {count: -1}
end
end
| 23.647059 | 69 | 0.691542 |
e99fb02173be651a0b792cc55cd70b505c938360 | 3,145 | class Economic::Entity
# Identifies an entity on the e-conomic API. A handle wraps the set of
# id-like properties (id, number, name, etc) that the API uses to
# reference an entity.
class Handle
  # Builds a Handle from a Hash (or hash-like value). An existing Handle
  # is returned as-is; nil yields nil.
  def self.build(options)
    return options if options.is_a?(Handle)
    return nil if options.nil?
    new(options)
  end

  # Maps the supported handle keys to their property names in the API.
  def self.id_properties
    {
      :code => 'Code',
      :id => 'Id',
      :id1 => 'Id1',
      :id2 => 'Id2',
      :name => 'Name',
      :number => 'Number',
      :serial_number => 'SerialNumber',
      :vat_code => 'VatCode'
    }
  end

  def self.supported_keys
    id_properties.keys
  end

  # Parentheses avoid Ruby's "ambiguous splat" warning on this call.
  attr_accessor(*supported_keys)

  # Returns true if Handle hasn't been initialized with any values yet. This
  # usually happens when the handle is constructed for an entity whose id
  # properties (id, number, etc) haven't been set yet.
  def empty?
    to_hash.empty?
  end

  def initialize(hash)
    verify_sanity_of_arguments!(hash)
    hash = prepare_hash_argument(hash) unless hash.is_a?(self.class)

    # String-valued properties are stored verbatim...
    [:code, :name, :vat_code, :number].each do |key|
      instance_variable_set("@#{key}", hash[key]) if hash[key]
    end
    # ...while integer-valued properties are coerced with to_i.
    [:id, :id1, :id2, :serial_number].each do |key|
      instance_variable_set("@#{key}", hash[key].to_i) if hash[key]
    end
  end

  # Serializes the non-blank properties into a Hash keyed by the API
  # property names; only_keys restricts which properties are included.
  def to_hash(only_keys = id_properties.keys)
    only_keys = [only_keys].flatten
    only_keys.each_with_object({}) do |key, hash|
      property = id_properties[key]
      value = self.send(key)
      next if value.blank?
      hash[property] = value
    end
  end

  def [](key)
    instance_variable_get("@#{key}")
  end

  # Handles compare by id, number, id1 and id2. Two distinct empty
  # handles are never considered equal.
  def ==(other)
    return true if self.object_id == other.object_id
    return false if other.nil?
    return false if empty? || (other.respond_to?(:empty?) && other.empty?)
    return false unless other.respond_to?(:id) && other.respond_to?(:number)
    self.id == other.id && self.number == other.number && self.id1 == other.id1 && self.id2 == other.id2
  end

  private

  def id_properties
    self.class.id_properties
  end

  # Raises exceptions if hash doesn't contain values we can use to construct a
  # new handle
  def verify_sanity_of_arguments!(hash)
    return if hash.is_a?(self.class)

    if hash.nil? || (!hash.respond_to?(:to_i) && (!hash.respond_to?(:keys) && !hash.respond_to?(:values)))
      raise ArgumentError.new("Expected Number, Hash or Economic::Entity::Handle - got #{hash.inspect}")
    end

    if hash.respond_to?(:keys)
      unknown_keys = hash.keys - id_properties.keys - id_properties.values
      raise ArgumentError.new("Unknown keys in handle: #{unknown_keys.inspect}") unless unknown_keys.empty?
    end
  end

  # Examples
  #
  #   prepare_hash_argument(12) #=> {:id => 12}
  #   prepare_hash_argument(:id => 12) #=> {:id => 12}
  #   prepare_hash_argument('Id' => 12) #=> {:id => 12}
  #   prepare_hash_argument('Id' => 12, 'Number' => 13) #=> {:id => 12, :number => 13}
  def prepare_hash_argument(hash)
    # NOTE: relies on Object#blank? from ActiveSupport being available.
    hash = {:id => hash.to_i} if hash.respond_to?(:to_i) unless hash.blank?
    hash[:id] ||= hash['Id']
    hash[:number] ||= hash['Number']
    hash
  end
end
end
| 30.240385 | 109 | 0.606677 |
b96d3b4dea73f57b73a1138eae221e7ae8b458fb | 1,225 | assert("slop - parse - option - nothing") do
argv = ["path"]
options = Slop.parse(argv) do |parser|
end
# With no options declared, every token stays a positional argument.
assert_equal(["path"], options.arguments)
end

assert("slop - parse - option - processed") do
  argv = ["--log", "/tmp/log", "path"]
  options = Slop.parse(argv) do |parser|
    parser.string("--log", "Log to PATH")
  end
  # Declared options are consumed into the hash; the rest remain args.
  assert_equal([
    {
      :log => "/tmp/log",
    },
    ["path"],
  ],
  [
    options.to_hash,
    options.arguments,
  ])
end

assert("slop - parse - option - short") do
  argv = ["-l", "/tmp/log", "path"]
  options = Slop.parse(argv) do |parser|
    parser.string("-l", "Log to PATH")
  end
  assert_equal([
    {
      :l => "/tmp/log",
    },
    ["path"],
  ],
  [
    options.to_hash,
    options.arguments,
  ])
end

assert("slop - to_s") do
  # $0 is embedded in the usage banner, so pin it for the comparison.
  $0 = "test/mrb.rb"
  options = Slop::Options.new
  options.string("-l", "--log", "Log to PATH")
  assert_equal(<<-HELP, options.to_s)
usage: test/mrb.rb [options]
-l, --log Log to PATH
  HELP
end
| 24.019608 | 46 | 0.456327 |
62e41792f2fe55f604c4bfde6d75deb56b2e5e42 | 1,755 | class TopBox::CLI
# Entry point: scrapes the current box office list, prints it, and
# starts the interactive menu loop.
def call
  puts 'Current Top Box Office Movies by imdb.com'
  TopBox::Movie.new_from_collection(Scraper.scrape_movie_list)
  list_movies
  menu
end
# Prints each scraped movie as a numbered entry with its week count
# and total gross.
def list_movies
  movies = TopBox::Movie.all
  movies.each do |movie|
    puts "#{movie.num}. #{movie.title}"
    puts "Week #{movie.weeks_in_theater}, Total gross: #{movie.total_gross}"
    puts '--------------'
  end
end
# Takes user input to provide more information: a movie number shows its
# details, 'list' reprints the list, 'exit' quits, anything else re-prompts.
def menu
  puts "Enter number of a movie from the list to view a plot summary, 'list' to see the list of movies again or 'exit' to quit"
  x = gets.strip.downcase
  num = x.to_i
  # Use && rather than the low-precedence `and`; accepts 1..length.
  if num > 0 && num < TopBox::Movie.all.length + 1
    m = TopBox::Movie.all[num-1]
    # Only scrape the detail page the first time a movie is selected.
    if !m.summary
      m.get_movie_details
    end
    puts "---------"
    puts "#{m.title}, #{m.runtime}"
    puts "Metascore: #{m.metascore}/100"
    puts "---------"
    puts m.summary
    puts "---------"
    review_menu(m)
  elsif x=='exit'
    exit
  elsif x=='list'
    list_movies
    menu
  else
    puts "invalid entry"
    menu
  end
end
# Offers to print critic reviews for the given movie, then returns to
# the main menu regardless of the answer.
def review_menu(m)
  puts "Would you like to see some critic reviews from this movie? y/n"
  x = gets.strip.downcase
  if x == 'y'
    m.get_reviews
    puts "--------------"
    m.reviews.each { |review|
      puts "Score: #{review.score}/100"
      if review.author == ''
        puts "From: #{review.publication}"
      else
        puts "From: #{review.publication}, by #{review.author}"
      end
      puts review.summary
      puts "--------------"
    }
    menu
  else
    # Any answer other than 'y' (including 'n') goes straight back to the
    # menu. The original code had a dead `else x=='n'` expression here.
    menu
  end
end
end
| 24.375 | 129 | 0.565242 |
1cec2edad2d8eb68fe8efc55df02f1af2630d0dd | 4,745 | #
# Cookbook Name:: cookbook-openshift3
# Recipe:: master_standalone
#
# Copyright (c) 2015 The Authors, All Rights Reserved.
helper = OpenShiftHelper::NodeHelper.new(node)
# Containerized deployments are versioned by docker image tag,
# package-based deployments by the OSE major version.
ose_major_version = node['cookbook-openshift3']['deploy_containerized'] == true ? node['cookbook-openshift3']['openshift_docker_image_version'] : node['cookbook-openshift3']['ose_major_version']

# Restore the master CA key/cert pair from an (optionally encrypted)
# data bag when one is configured.
if node['cookbook-openshift3']['openshift_master_ca_certificate']['data_bag_name'] && node['cookbook-openshift3']['openshift_master_ca_certificate']['data_bag_item_name']
  secret_file = node['cookbook-openshift3']['openshift_master_ca_certificate']['secret_file'] || nil
  ca_vars = data_bag_item(node['cookbook-openshift3']['openshift_master_ca_certificate']['data_bag_name'], node['cookbook-openshift3']['openshift_master_ca_certificate']['data_bag_item_name'], secret_file)

  file "#{node['cookbook-openshift3']['master_certs_generated_certs_dir']}/ca.key" do
    content Base64.decode64(ca_vars['key_base64'])
    mode '0600'
    action :create_if_missing
  end

  file "#{node['cookbook-openshift3']['master_certs_generated_certs_dir']}/ca.crt" do
    content Base64.decode64(ca_vars['cert_base64'])
    mode '0644'
    action :create_if_missing
  end
end

# The master package is only installed for non-containerized deployments.
package "#{node['cookbook-openshift3']['openshift_service_type']}-master" do
  action :install
  version node['cookbook-openshift3']['ose_version'] unless node['cookbook-openshift3']['ose_version'].nil?
  notifies :run, 'execute[daemon-reload]', :immediately
  not_if { node['cookbook-openshift3']['deploy_containerized'] }
  retries 3
end

ruby_block 'Duplicate Master directory' do
  block do
    helper.backup_dir("#{node['cookbook-openshift3']['master_certs_generated_certs_dir']}/.", node['cookbook-openshift3']['openshift_master_config_dir'])
  end
end

# Containerized masters run via a generated systemd unit instead.
template "/etc/systemd/system/#{node['cookbook-openshift3']['openshift_service_type']}-master.service" do
  source 'service_master-containerized.service.erb'
  notifies :run, 'execute[daemon-reload]', :immediately
  only_if { node['cookbook-openshift3']['deploy_containerized'] }
end

# AWS cloud-provider credentials come from an (optionally encrypted)
# data bag and are rendered into the service sysconfig file below.
sysconfig_vars = {}
if node['cookbook-openshift3']['openshift_cloud_provider'] == 'aws'
  if node['cookbook-openshift3']['openshift_cloud_providers']['aws']['data_bag_name'] && node['cookbook-openshift3']['openshift_cloud_providers']['aws']['data_bag_item_name']
    secret_file = node['cookbook-openshift3']['openshift_cloud_providers']['aws']['secret_file'] || nil
    aws_vars = data_bag_item(node['cookbook-openshift3']['openshift_cloud_providers']['aws']['data_bag_name'], node['cookbook-openshift3']['openshift_cloud_providers']['aws']['data_bag_item_name'], secret_file)

    sysconfig_vars['aws_access_key_id'] = aws_vars['access_key_id']
    sysconfig_vars['aws_secret_access_key'] = aws_vars['secret_access_key']
  end
end

template "/etc/sysconfig/#{node['cookbook-openshift3']['openshift_service_type']}-master" do
  source 'service_master.sysconfig.erb'
  variables(sysconfig_vars)
  notifies :restart, 'service[Restart Master]', :immediately
end

execute 'Create the policy file' do
  command "#{node['cookbook-openshift3']['openshift_common_admin_binary']} create-bootstrap-policy-file --filename=#{node['cookbook-openshift3']['openshift_master_policy']}"
  creates node['cookbook-openshift3']['openshift_master_policy']
  notifies :restart, 'service[Restart Master]', :immediately
end

template node['cookbook-openshift3']['openshift_master_scheduler_conf'] do
  source 'scheduler.json.erb'
  variables ose_major_version: node['cookbook-openshift3']['deploy_containerized'] == true ? node['cookbook-openshift3']['openshift_docker_image_version'] : node['cookbook-openshift3']['ose_major_version']
  notifies :restart, 'service[Restart Master]', :immediately
end

# htpasswd-backed identity providers need the htpasswd tooling and file.
if node['cookbook-openshift3']['oauth_Identities'].include? 'HTPasswdPasswordIdentityProvider'
  package 'httpd-tools' do
    retries 3
  end

  template node['cookbook-openshift3']['openshift_master_identity_provider']['HTPasswdPasswordIdentityProvider']['filename'] do
    source 'htpasswd.erb'
    mode '600'
  end
end

# Aggregator wiring only applies to OpenShift >= 3.7.
include_recipe 'cookbook-openshift3::wire_aggregator' if ose_major_version.split('.')[1].to_i >= 7

openshift_create_master 'Create master configuration file' do
  named_certificate node['cookbook-openshift3']['openshift_master_named_certificates']
  origins node['cookbook-openshift3']['erb_corsAllowedOrigins'].uniq
  standalone_registry node['cookbook-openshift3']['deploy_standalone_registry']
  master_file node['cookbook-openshift3']['openshift_master_config_file']
  openshift_service_type node['cookbook-openshift3']['openshift_service_type']
end

service "#{node['cookbook-openshift3']['openshift_service_type']}-master" do
  action %i(start enable)
end
| 46.980198 | 210 | 0.772181 |
112b095d922817be1554a80583a1544e326b3a47 | 200 | class NitrousDesktop < Cask
# Always installs the latest build, so no checksum can be pinned.
version :latest
sha256 :no_check

url 'https://www.nitrous.io/mac/NitrousDesktop-latest.zip'
homepage 'https://www.nitrous.io/mac'

app 'Nitrous Desktop.app'
end
| 20 | 60 | 0.735 |
213d0b7f4940d1eb2e60541221909b1d7c58914c | 5,525 | #
# Author:: Daniel DeLeo (<[email protected]>)
# Copyright:: Copyright (c) 2012 Opscode, Inc.
# License:: Apache License, Version 2.0
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
require 'chef/config'
require 'chef/rest'
require 'chef/exceptions'
class Chef
  class ApiClient

    # ==Chef::ApiClient::Registration
    # Manages the process of creating or updating a Chef::ApiClient on the
    # server and writing the resulting private key to disk. Registration uses
    # the validator credentials for its API calls. This allows it to bootstrap
    # a new client/node identity by borrowing the validator client identity
    # when creating a new client.
    class Registration

      attr_reader :destination
      attr_reader :name

      # @param name [String] name of the client to register.
      # @param destination [String] path the private key is written to.
      def initialize(name, destination)
        @name = name
        @destination = destination

        @server_generated_private_key = nil
      end

      # Runs the client registration process, including creating the client on
      # the chef-server and writing its private key to disk.
      #--
      # If client creation fails with a 5xx, it is retried up to 5 times. These
      # retries are on top of the retries with randomized exponential backoff
      # built in to Chef::REST. The retries here are a workaround for failures
      # caused by resource contention in Hosted Chef when creating a very large
      # number of clients simultaneously, (e.g., spinning up 100s of ec2 nodes
      # at once). Future improvements to the affected component should make
      # these retries unnecessary.
      def run
        assert_destination_writable!
        retries = Config[:client_registration_retries] || 5
        begin
          create_or_update
        rescue Net::HTTPFatalError => e
          # HTTPFatalError implies 5xx.
          raise if retries <= 0
          retries -= 1
          Chef::Log.warn("Failed to register new client, #{retries} tries remaining")
          Chef::Log.warn("Response: HTTP #{e.response.code} - #{e}")
          retry
        end
        write_key
      end

      # Raises unless the destination file (or its directory) is writable.
      def assert_destination_writable!
        # File.exist? replaces the deprecated File.exists? (removed in Ruby 3.2).
        if (File.exist?(destination) && !File.writable?(destination)) or !File.writable?(File.dirname(destination))
          raise Chef::Exceptions::CannotWritePrivateKey, "I cannot write your private key to #{destination} - check permissions?"
        end
      end

      # Writes the client's private key to destination with 0600 permissions.
      def write_key
        ::File.open(destination, file_flags, 0600) do |f|
          f.print(private_key)
        end
      rescue IOError, SystemCallError => e
        # SystemCallError covers Errno::EACCES and friends raised by
        # File.open, which are not IOErrors.
        raise Chef::Exceptions::CannotWritePrivateKey, "Error writing private key to #{destination}: #{e}"
      end

      # POSTs a new client; falls back to an update when it already exists.
      def create_or_update
        create
      rescue Net::HTTPServerException => e
        # If create fails because the client exists, attempt to update. This
        # requires admin privileges.
        raise unless e.response.code == "409"
        update
      end

      def create
        response = http_api.post("clients", post_data)
        @server_generated_private_key = response["private_key"]
        response
      end

      def update
        response = http_api.put("clients/#{name}", put_data)
        if response.respond_to?(:private_key) # Chef 11
          @server_generated_private_key = response.private_key
        else # Chef 10
          @server_generated_private_key = response["private_key"]
        end
        response
      end

      def put_data
        base_put_data = { :name => name, :admin => false }
        if self_generate_keys?
          base_put_data[:public_key] = generated_public_key
        else
          base_put_data[:private_key] = true
        end
        base_put_data
      end

      def post_data
        post_data = { :name => name, :admin => false }
        post_data[:public_key] = generated_public_key if self_generate_keys?
        post_data
      end

      # REST client authenticated with the validator identity.
      def http_api
        @http_api_as_validator ||= Chef::REST.new(Chef::Config[:chef_server_url],
                                                  Chef::Config[:validation_client_name],
                                                  Chef::Config[:validation_key])
      end

      # Whether or not to generate keys locally and post the public key to the
      # server. Delegates to `Chef::Config.local_key_generation`. Servers
      # before 11.0 do not support this feature.
      def self_generate_keys?
        Chef::Config.local_key_generation
      end

      def private_key
        if self_generate_keys?
          generated_private_key.to_pem
        else
          @server_generated_private_key
        end
      end

      # Lazily generates (and memoizes) a 2048-bit RSA key pair.
      def generated_private_key
        @generated_key ||= OpenSSL::PKey::RSA.generate(2048)
      end

      def generated_public_key
        generated_private_key.public_key.to_pem
      end

      def file_flags
        base_flags = File::CREAT|File::TRUNC|File::RDWR
        # Windows doesn't have symlinks, so it doesn't have NOFOLLOW
        base_flags |= File::NOFOLLOW if defined?(File::NOFOLLOW)
        base_flags
      end
    end
  end
end
| 33.689024 | 129 | 0.648507 |
396140c25cf7f21b7ceae0bf693e48451a13b586 | 1,833 | # encoding: utf-8
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
module Azure::Security::Mgmt::V2019_01_01
  module Models
    #
    # Represents a security setting in Azure Security Center.
    #
    class Setting < SettingResource

      include MsRestAzure


      #
      # Mapper for Setting class as Ruby Hash.
      # This will be used for serialization/deserialization.
      #
      # id/name/type are populated by the server (read only); only kind
      # is required on the wire.
      #
      def self.mapper()
        {
          client_side_validation: true,
          required: false,
          serialized_name: 'Setting',
          type: {
            name: 'Composite',
            class_name: 'Setting',
            model_properties: {
              id: {
                client_side_validation: true,
                required: false,
                read_only: true,
                serialized_name: 'id',
                type: {
                  name: 'String'
                }
              },
              name: {
                client_side_validation: true,
                required: false,
                read_only: true,
                serialized_name: 'name',
                type: {
                  name: 'String'
                }
              },
              type: {
                client_side_validation: true,
                required: false,
                read_only: true,
                serialized_name: 'type',
                type: {
                  name: 'String'
                }
              },
              kind: {
                client_side_validation: true,
                required: true,
                serialized_name: 'kind',
                type: {
                  name: 'String'
                }
              }
            }
          }
        }
      end
    end
  end
end
| 25.816901 | 70 | 0.443535 |
38818efc6c69a0ea69cb9e921ba77fac41d2d37e | 420 | cask :v1 => 'teleport' do
# Always installs the latest build, so no checksum can be pinned.
version :latest
sha256 :no_check

url 'http://www.abyssoft.com/software/teleport/downloads/teleport.zip'
name 'teleport'
# The appcast checksum pins the current contents of the versions feed.
appcast 'http://www.abyssoft.com/software/teleport/versions.xml',
        :sha256 => 'c56815ad8028891f56f8ae5643e9e68b6ca59f2c31c49064e32f9822cc111533'
homepage 'http://www.abyssoft.com/software/teleport/'
license :gratis

app 'teleport/teleport.app'
end
| 30 | 87 | 0.747619 |
bb3ffb60ca5660102e25e138d27d68740b44ff47 | 792 | require 'formula'
# Homebrew formula for the lighttpd web server (1.4.x release line).
class Lighttpd < Formula
  homepage 'http://www.lighttpd.net/'
  url 'http://download.lighttpd.net/lighttpd/releases-1.4.x/lighttpd-1.4.32.tar.bz2'
  sha256 '60691b2dcf3ad2472c06b23d75eb0c164bf48a08a630ed3f308f61319104701f'

  # Optional Lua support enables scripting via mod_magnet.
  option 'with-lua', 'Include Lua scripting support for mod_magnet'

  depends_on 'pkg-config' => :build
  depends_on 'pcre'
  depends_on 'lua' => :optional
  depends_on 'libev' => :optional

  def install
    # Always-on features: SSL, LDAP auth, gzip/bzip2 compression and
    # extended-attribute support. %W interpolates #{prefix}.
    args = %W[
      --disable-dependency-tracking
      --prefix=#{prefix}
      --with-openssl
      --with-ldap
      --with-zlib
      --with-bzip2
      --with-attr
    ]

    # Feature flags toggled by the user's install options.
    args << "--with-lua" if build.with? 'lua'
    args << "--with-libev" if build.with? 'libev'

    system "./configure", *args
    system "make install"
  end
end
| 24 | 84 | 0.655303 |
5d011813853bf28a9336d096d0d20fb2e542ef49 | 751 | require 'roku-packager/key_gen'
require 'roku-packager/uploader'
require 'roku-packager/packager'
module RokuPackager
# High-level facade for packaging a Roku application on a development
# device: generates signing keys, uploads the app, and builds the package.
class Client
  # +development_roku_ip+ - address of a Roku set to developer mode.
  # +logger+ - anything responding to #info; defaults to a silent logger.
  def initialize(development_roku_ip, logger = NullLogger.new)
    @host = development_roku_ip
    @logger = logger
  end

  # Signing password and developer id captured during #package.
  # NOTE(review): download_url is never assigned in this file - presumably
  # set elsewhere or vestigial; verify before relying on it.
  attr_reader :password, :dev_id, :download_url

  # Packages +path_to_application_zip+ under +name+ on the device and
  # returns the package download URL as a String.
  # NOTE(review): assumes KeyGen#create returns [dev_id, password] in that
  # order - confirm against KeyGen.
  def package(name, path_to_application_zip)
    @logger.info 'Generating keys...'
    @dev_id, @password = KeyGen.new(@host).create
    @logger.info 'Uploading application file...'
    Uploader.new(@host).upload(path_to_application_zip)
    @logger.info 'Building package...'
    uri = Packager.new(@host, name, @password).submit
    uri.to_s
  end
end
# Silent logger used when the caller supplies none: accepts and discards
# any #info arguments.
class NullLogger
  # Swallows every message; returns nil.
  def info(*args); end
end
end
| 22.757576 | 64 | 0.687084 |
795d45e0ebeaf96a886d1ae3ea59d3eb8a2fd416 | 1,026 | # Copyright 2017 Google, Inc
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# [START welcome_routes]
# Route table: the welcome page is reachable both at /welcome/index and
# as the site root.
Rails.application.routes.draw do
  get 'welcome/index'
  # For details on the DSL available within this file, see http://guides.rubyonrails.org/routing.html
  root 'welcome#index'
end
# [END welcome_routes]
=begin
# [START boilerplate]
Rails.application.routes.draw do
get 'welcome/index'
# For details on the DSL available within this file, see http://guides.rubyonrails.org/routing.html
end
# [END boilerplate]
=end
| 31.090909 | 101 | 0.758285 |
f72bf1be778ac19032c0fa1f77f33af1530ee336 | 438 | # Be sure to restart your server when you modify this file.
# Keep session data in a signed cookie under the given key (Rails default).
MelbourneStatus::Application.config.session_store :cookie_store, key: '_melbourne_status_session'

# Use the database for sessions instead of the cookie-based default,
# which shouldn't be used to store highly confidential information
# (create the session table with "rails generate session_migration")
# MelbourneStatus::Application.config.session_store :active_record_store
| 48.666667 | 97 | 0.821918 |
f7a813723ed389601df9385153711e1a962ab776 | 842 | require_relative "boot"
require "rails/all"
# Require the gems listed in Gemfile, including any gems
# you've limited to :test, :development, or :production.
Bundler.require(*Rails.groups)
module Verleng
# Application-wide Rails configuration for Verleng.
class Application < Rails::Application
  # Initialize configuration defaults for originally generated Rails version.
  config.load_defaults 7.0

  # Configuration for the application, engines, and railties goes here.
  #
  # These settings can be overridden in specific environments using the files
  # in config/environments, which are processed later.
  #
  # config.time_zone = "Central Time (US & Canada)"
  # config.eager_load_paths << Rails.root.join("extras")

  # Deliver all mail through the OhMySMTP API; the token comes from the
  # environment so it is never committed to source control.
  config.action_mailer.delivery_method = :ohmysmtp
  config.action_mailer.ohmysmtp_settings = {api_token: ENV["OHMYSMTP_API_TOKEN"]}
end
end
| 32.384615 | 83 | 0.744656 |
33dfb52403176c06ef590072dfc48e3c9728873f | 9,061 | #
# Author:: Stephen Delano (<[email protected]>)
# Author:: Seth Falcon (<[email protected]>)
# Author:: John Keiser (<[email protected]>)
# Author:: Kyle Goodwin (<[email protected]>)
# Copyright:: Copyright 2010-2011 Opscode, Inc.
# License:: Apache License, Version 2.0
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
require 'chef/config'
require 'chef/mash'
require 'chef/mixin/params_validate'
require 'chef/mixin/from_file'
require 'chef/version_constraint'
class Chef
class Environment
DEFAULT = "default"
include Chef::Mixin::ParamsValidate
include Chef::Mixin::FromFile
COMBINED_COOKBOOK_CONSTRAINT = /(.+)(?:[\s]+)((?:#{Chef::VersionConstraint::OPS.join('|')})(?:[\s]+).+)$/.freeze
# Creates an empty environment: blank name/description, no attribute
# overrides and no cookbook version constraints.
def initialize
  @name = ''
  @description = ''
  @default_attributes = Mash.new
  @override_attributes = Mash.new
  @cookbook_versions = Hash.new
end

# REST client bound to the configured Chef server (instance-level).
def chef_server_rest
  Chef::REST.new(Chef::Config[:chef_server_url])
end

# REST client bound to the configured Chef server (class-level twin).
def self.chef_server_rest
  Chef::REST.new(Chef::Config[:chef_server_url])
end

# Gets or sets the environment name. Only letters, digits, hyphen and
# underscore are accepted; anything else raises ValidationFailed.
def name(arg=nil)
  set_or_return(
    :name,
    arg,
    { :regex => /^[\-[:alnum:]_]+$/, :kind_of => String }
  )
end

# Gets or sets the human-readable description.
def description(arg=nil)
  set_or_return(
    :description,
    arg,
    :kind_of => String
  )
end

# Gets or sets the default attribute hash applied to nodes in this
# environment (lowest attribute precedence).
def default_attributes(arg=nil)
  set_or_return(
    :default_attributes,
    arg,
    :kind_of => Hash
  )
end

# Assignment-style alias for default_attributes (used by deserializers).
def default_attributes=(attrs)
  default_attributes(attrs)
end

# Gets or sets the override attribute hash (high attribute precedence).
def override_attributes(arg=nil)
  set_or_return(
    :override_attributes,
    arg,
    :kind_of => Hash
  )
end

# Assignment-style alias for override_attributes.
def override_attributes=(attrs)
  override_attributes(attrs)
end

# Gets or sets the cookbook constraint map
# (cookbook name => constraint string such as "~> 1.2.0"). The whole hash
# must pass Environment.validate_cookbook_versions.
def cookbook_versions(arg=nil)
  set_or_return(
    :cookbook_versions,
    arg,
    {
      :kind_of => Hash,
      :callbacks => {
        "should be a valid set of cookbook version requirements" => lambda { |cv| Chef::Environment.validate_cookbook_versions(cv) }
      }
    }
  )
end

# Sets the constraint for a single +cookbook+ after validating +version+.
def cookbook(cookbook, version)
  validate({
    :version => version
  },{
    :version => {
      :callbacks => { "should be a valid version requirement" => lambda { |v| Chef::Environment.validate_cookbook_version(v) } }
    }
  })
  @cookbook_versions[cookbook] = version
end

# Plain-Hash representation used for JSON serialization and API payloads.
# Key order is preserved and therefore part of the serialized form.
def to_hash
  result = {
    "name" => @name,
    "description" => @description,
    "cookbook_versions" => @cookbook_versions,
    "json_class" => self.class.name,
    "chef_type" => "environment",
    "default_attributes" => @default_attributes,
    "override_attributes" => @override_attributes
  }
  result
end

# Serializes the object via #to_hash.
def to_json(*a)
  Chef::JSONCompat.to_json(to_hash, *a)
end
# Copies every mutable field (except name) from another Environment +o+
# into self; returns self.
def update_from!(o)
  description(o.description)
  cookbook_versions(o.cookbook_versions)
  default_attributes(o.default_attributes)
  override_attributes(o.override_attributes)
  self
end

# Parses the (JSON-string) attribute fields out of a web-form +params+
# hash; empty or missing fields leave the current attributes untouched.
def update_attributes_from_params(params)
  unless params[:default_attributes].nil? || params[:default_attributes].size == 0
    default_attributes(Chef::JSONCompat.from_json(params[:default_attributes]))
  end
  unless params[:override_attributes].nil? || params[:override_attributes].size == 0
    override_attributes(Chef::JSONCompat.from_json(params[:override_attributes]))
  end
end

# Applies a web-form +params+ hash to this environment. Returns true when
# every field validated; on failure, collected messages are available via
# #invalid_fields and the original cookbook constraints are restored.
def update_from_params(params)
  # reset because everything we need will be in the params, this is necessary because certain constraints
  # may have been removed in the params and need to be removed from cookbook_versions as well.
  bkup_cb_versions = cookbook_versions
  cookbook_versions(Hash.new)
  valid = true

  begin
    name(params[:name])
  rescue Chef::Exceptions::ValidationFailed => e
    invalid_fields[:name] = e.message
    valid = false
  end
  description(params[:description])

  unless params[:cookbook_version].nil?
    params[:cookbook_version].each do |index, cookbook_constraint_spec|
      # Skip blank rows; short-circuit keeps +valid+ false once any
      # constraint fails.
      unless (cookbook_constraint_spec.nil? || cookbook_constraint_spec.size == 0)
        valid = valid && update_cookbook_constraint_from_param(index, cookbook_constraint_spec)
      end
    end
  end

  update_attributes_from_params(params)

  valid = validate_required_attrs_present && valid
  cookbook_versions(bkup_cb_versions) unless valid # restore the old cookbook_versions if valid is false
  valid
end

# Parses a single "name CONSTRAINT" spec (e.g. "apt ~> 1.2.0") and stores
# it in cookbook_versions. Returns false (and records an error) when the
# spec does not match COMBINED_COOKBOOK_CONSTRAINT or the constraint part
# fails validation.
def update_cookbook_constraint_from_param(index, cookbook_constraint_spec)
  valid = true
  md = cookbook_constraint_spec.match(COMBINED_COOKBOOK_CONSTRAINT)
  if md.nil? || md[2].nil?
    valid = false
    add_cookbook_constraint_error(index, cookbook_constraint_spec)
  elsif self.class.validate_cookbook_version(md[2])
    cookbook_versions[md[1]] = md[2]
  else
    valid = false
    add_cookbook_constraint_error(index, cookbook_constraint_spec)
  end
  valid
end

# Records a per-row constraint parse error, keyed by the form row +index+.
def add_cookbook_constraint_error(index, cookbook_constraint_spec)
  invalid_fields[:cookbook_version] ||= {}
  invalid_fields[:cookbook_version][index] = "#{cookbook_constraint_spec} is not a valid cookbook constraint"
end

# Accumulated validation messages from the last update_from_params run.
def invalid_fields
  @invalid_fields ||= {}
end

# Ensures name is present; records a message and returns false otherwise.
def validate_required_attrs_present
  if name.nil? || name.size == 0
    invalid_fields[:name] ||= "name cannot be empty"
    false
  else
    true
  end
end
# Rehydrates an Environment from a parsed-JSON hash +o+.
def self.json_create(o)
  environment = new
  environment.name(o["name"])
  environment.description(o["description"])
  environment.cookbook_versions(o["cookbook_versions"])
  environment.default_attributes(o["default_attributes"])
  environment.override_attributes(o["override_attributes"])
  environment
end

# Lists environments on the server. With +inflate+, performs a search and
# returns {name => Environment}; otherwise returns the raw index payload.
def self.list(inflate=false)
  if inflate
    response = Hash.new
    Chef::Search::Query.new.search(:environment) do |e|
      response[e.name] = e unless e.nil?
    end
    response
  else
    chef_server_rest.get_rest("environments")
  end
end

# Loads one environment by +name+ - from disk in chef-solo mode, from the
# server otherwise.
def self.load(name)
  if Chef::Config[:solo]
    load_from_file(name)
  else
    chef_server_rest.get_rest("environments/#{name}")
  end
end
# Loads environment +name+ from Chef::Config[:environment_path], trying
# "<name>.json" first and then "<name>.rb".
#
# Raises InvalidEnvironmentPath when the configured directory is missing
# and EnvironmentNotFound when neither file exists.
#
# Fix: File.exists? is deprecated and removed in Ruby 3.2; use File.exist?.
def self.load_from_file(name)
  unless File.directory?(Chef::Config[:environment_path])
    raise Chef::Exceptions::InvalidEnvironmentPath, "Environment path '#{Chef::Config[:environment_path]}' is invalid"
  end

  js_file = File.join(Chef::Config[:environment_path], "#{name}.json")
  rb_file = File.join(Chef::Config[:environment_path], "#{name}.rb")

  if File.exist?(js_file)
    # from_json returns object.class => json_class in the JSON.
    Chef::JSONCompat.from_json(IO.read(js_file))
  elsif File.exist?(rb_file)
    environment = Chef::Environment.new
    environment.name(name)
    environment.from_file(rb_file)
    environment
  else
    raise Chef::Exceptions::EnvironmentNotFound, "Environment '#{name}' could not be loaded from disk"
  end
end
# Deletes this environment on the server.
def destroy
  chef_server_rest.delete_rest("environments/#{@name}")
end

# Updates this environment on the server; when the server replies 404
# (environment does not exist yet), falls back to creating it. Returns self.
def save
  begin
    chef_server_rest.put_rest("environments/#{@name}", self)
  rescue Net::HTTPServerException => e
    raise e unless e.response.code == "404"
    chef_server_rest.post_rest("environments", self)
  end
  self
end

# Creates this environment on the server (no update fallback). Returns self.
def create
  chef_server_rest.post_rest("environments", self)
  self
end

# Returns the recipe list for +environment+ filtered by its constraints.
def self.load_filtered_recipe_list(environment)
  chef_server_rest.get_rest("environments/#{environment}/recipes")
end

# An environment stringifies to its name.
def to_s
  @name
end
# Returns true when +cv+ is a Hash whose every value is a valid cookbook
# version constraint string; false otherwise.
def self.validate_cookbook_versions(cv)
  return false unless cv.kind_of?(Hash)
  # Enumerable#all? replaces the manual each/early-return loop.
  cv.values.all? { |version| Chef::Environment.validate_cookbook_version(version) }
end

# Returns true when +version+ parses as a Chef::VersionConstraint, false
# when parsing raises ArgumentError. In chef-solo mode environment
# constraints are unsupported and IllegalVersionConstraint is raised
# (deliberately not rescued below).
def self.validate_cookbook_version(version)
  if Chef::Config[:solo]
    raise Chef::Exceptions::IllegalVersionConstraint,
          "Environment cookbook version constraints not allowed in chef-solo"
  end
  Chef::VersionConstraint.new version
  true
rescue ArgumentError
  false
end
end
end
| 28.583596 | 136 | 0.650701 |
386553857f578539b8e1b9cf1594f792816d6461 | 675 | require 'spec_helper'
require 'generator_spec/test_case'
require 'generators/refinery/core/core_generator'
module Refinery
# Verifies that the Refinery core generator writes the expected initializer.
describe CoreGenerator do
  include GeneratorSpec::TestCase
  # Generate into a throwaway tmp directory relative to this spec file.
  destination File.expand_path("../../../../../tmp", __FILE__)

  before do
    prepare_destination
    run_generator
  end

  # config/initializers/refinery/core.rb must exist and contain the
  # Refinery configuration block.
  specify do
    expect(destination_root).to have_structure {
      directory "config" do
        directory "initializers" do
          directory "refinery" do
            file "core.rb" do
              contains "Refinery::Core.configure do |config|"
            end
          end
        end
      end
    }
  end
end
end
| 22.5 | 64 | 0.61037 |
1d675a775252010a5bcead34b761128db249b870 | 16,444 | # frozen_string_literal: true
#
# This class implements a pretty printing algorithm. It finds line breaks and
# nice indentations for grouped structure.
#
# By default, the class assumes that primitive elements are strings and each
# byte in the strings have single column in width. But it can be used for
# other situations by giving suitable arguments for some methods:
# * newline object and space generation block for PrettyPrint.new
# * optional width argument for PrettyPrint#text
# * PrettyPrint#breakable
#
# There are several candidate uses:
# * text formatting using proportional fonts
# * multibyte characters which has columns different to number of bytes
# * non-string formatting
#
# == Bugs
# * Box based formatting?
# * Other (better) model/algorithm?
#
# Report any bugs at http://bugs.ruby-lang.org
#
# == References
# Christian Lindig, Strictly Pretty, March 2000,
# http://www.st.cs.uni-sb.de/~lindig/papers/#pretty
#
# Philip Wadler, A prettier printer, March 1998,
# http://homepages.inf.ed.ac.uk/wadler/topics/language-design.html#prettier
#
# == Author
# Tanaka Akira <[email protected]>
#
class PrettyPrint
# This is a convenience method which is same as follows:
#
# begin
# q = PrettyPrint.new(output, maxwidth, newline, &genspace)
# ...
# q.flush
# output
# end
#
def PrettyPrint.format(output=''.dup, maxwidth=79, newline="\n", genspace=lambda {|n| ' ' * n})
  q = PrettyPrint.new(output, maxwidth, newline, &genspace)
  yield q
  # Emit anything still sitting in the buffer before returning +output+.
  q.flush
  output
end

# This is similar to PrettyPrint::format but the result has no breaks.
#
# +maxwidth+, +newline+ and +genspace+ are ignored.
#
# The invocation of +breakable+ in the block doesn't break a line and is
# treated as just an invocation of +text+.
#
def PrettyPrint.singleline_format(output=''.dup, maxwidth=nil, newline=nil, genspace=nil)
  q = SingleLine.new(output)
  yield q
  # SingleLine writes straight through and buffers nothing; no flush needed.
  output
end
# Creates a buffer for pretty printing.
#
# +output+ is an output target. If it is not specified, '' is assumed. It
# should have a << method which accepts the first argument +obj+ of
# PrettyPrint#text, the first argument +sep+ of PrettyPrint#breakable, the
# first argument +newline+ of PrettyPrint.new, and the result of a given
# block for PrettyPrint.new.
#
# +maxwidth+ specifies maximum line length. If it is not specified, 79 is
# assumed. However actual outputs may overflow +maxwidth+ if long
# non-breakable texts are provided.
#
# +newline+ is used for line breaks. "\n" is used if it is not specified.
#
# The block is used to generate spaces. {|width| ' ' * width} is used if it
# is not given.
#
def initialize(output=''.dup, maxwidth=79, newline="\n", &genspace)
  @output = output
  @maxwidth = maxwidth
  @newline = newline
  @genspace = genspace || lambda {|n| ' ' * n}

  @output_width = 0  # columns already committed to @output on the current line
  @buffer_width = 0  # columns pending in @buffer
  @buffer = []       # pending Text/Breakable fragments, in emit order
  # The implicit outermost group at depth 0; it is never popped.
  root_group = Group.new(0)
  @group_stack = [root_group]
  @group_queue = GroupQueue.new(root_group)
  @indent = 0
end
# The output object.
#
# This defaults to '', and should accept the << method
attr_reader :output

# The maximum width of a line, before it is separated in to a newline
#
# This defaults to 79, and should be an Integer
attr_reader :maxwidth

# The value that is appended to +output+ to add a new line.
#
# This defaults to "\n", and should be String
attr_reader :newline

# A lambda or Proc, that takes one argument, of an Integer, and returns
# the corresponding number of spaces.
#
# By default this is:
#   lambda {|n| ' ' * n}
attr_reader :genspace

# The number of spaces to be indented
attr_reader :indent

# The PrettyPrint::GroupQueue of groups in stack to be pretty printed
attr_reader :group_queue

# Returns the group most recently added to the stack.
#
# Each nested call to #group pushes a new Group whose depth is one more
# than its parent's, so this is always the innermost open group.
def current_group
  @group_stack.last
end
# Breaks the buffer into lines that are shorter than #maxwidth
# Breaks the buffer into lines that are shorter than #maxwidth
def break_outmost_groups
  # Keep breaking the outermost undecided group while the committed output
  # plus the pending buffer would overflow the line.
  while @maxwidth < @output_width + @buffer_width
    return unless group = @group_queue.deq

    # Flush buffered data up through this group's breakables; because the
    # group is now broken, each Breakable#output emits a newline.
    until group.breakables.empty?
      data = @buffer.shift
      @output_width = data.output(@output, @output_width)
      @buffer_width -= data.width
    end

    # Plain trailing Text can never cause a break by itself, so flush it too.
    while [email protected]? && Text === @buffer.first
      text = @buffer.shift
      @output_width = text.output(@output, @output_width)
      @buffer_width -= text.width
    end
  end
end

# This adds +obj+ as a text of +width+ columns in width.
#
# If +width+ is not specified, obj.length is used.
#
def text(obj, width=obj.length)
  if @buffer.empty?
    # Nothing pending: write straight through to the output.
    @output << obj
    @output_width += width
  else
    # Append to (or start) the trailing Text node so adjacent fragments
    # are flushed as one unit, then re-check for overflow.
    text = @buffer.last
    unless Text === text
      text = Text.new
      @buffer << text
    end
    text.add(obj, width)
    @buffer_width += width
    break_outmost_groups
  end
end
# This is similar to #breakable except
# the decision to break or not is determined individually.
#
# Two #fill_breakable under a group may cause 4 results:
# (break,break), (break,non-break), (non-break,break), (non-break,non-break).
# This is different to #breakable because two #breakable under a group
# may cause 2 results:
# (break,break), (non-break,non-break).
#
# The text +sep+ is inserted if a line is not broken at this point.
#
# If +sep+ is not specified, " " is used.
#
# If +width+ is not specified, +sep.length+ is used. You will have to
# specify this when +sep+ is a multibyte character, for example.
#
def fill_breakable(sep=' ', width=sep.length)
  # Wrapping in a one-off group lets each fill_breakable decide
  # independently whether to break.
  group { breakable sep, width }
end

# This says "you can break a line here if necessary", and a +width+\-column
# text +sep+ is inserted if a line is not broken at the point.
#
# If +sep+ is not specified, " " is used.
#
# If +width+ is not specified, +sep.length+ is used. You will have to
# specify this when +sep+ is a multibyte character, for example.
#
def breakable(sep=' ', width=sep.length)
  group = @group_stack.last
  if group.break?
    # The enclosing group has already committed to breaking: emit the
    # pending buffer, then start a new line at the current indentation.
    flush
    @output << @newline
    @output << @genspace.call(@indent)
    @output_width = @indent
    @buffer_width = 0
  else
    # Undecided: queue a Breakable and see whether the line overflows.
    @buffer << Breakable.new(sep, width, self)
    @buffer_width += width
    break_outmost_groups
  end
end

# This says "force a line break here".
#
# It will force the current group's "breakables" to break.
def break
  breakable
  current_group.break
end
# Groups line break hints added in the block. The line break hints are all
# to be used or not.
#
# If +indent+ is specified, the method call is regarded as nested by
# nest(indent) { ... }.
#
# If +open_obj+ is specified, <tt>text open_obj, open_width</tt> is called
# before grouping. If +close_obj+ is specified, <tt>text close_obj,
# close_width</tt> is called after grouping.
#
def group(indent=0, open_obj='', close_obj='', open_width=open_obj.length, close_width=close_obj.length)
  text open_obj, open_width
  group_sub {
    nest(indent) {
      yield
    }
  }
  text close_obj, close_width
end

# Takes a block and queues a new group that is indented 1 level further.
def group_sub
  group = Group.new(@group_stack.last.depth + 1)
  @group_stack.push group
  @group_queue.enq group
  begin
    yield
  ensure
    # Always pop; a group that produced no breakables can never break, so
    # drop it from the queue as well.
    @group_stack.pop
    if group.breakables.empty?
      @group_queue.delete group
    end
  end
end

# Increases left margin after newline with +indent+ for line breaks added in
# the block.
#
def nest(indent)
  @indent += indent
  begin
    yield
  ensure
    @indent -= indent
  end
end

# outputs buffered data.
#
def flush
  @buffer.each {|data|
    @output_width = data.output(@output, @output_width)
  }
  @buffer.clear
  @buffer_width = 0
end
# The Text class is the means by which to collect strings from objects.
#
# This class is intended for internal use of the PrettyPrint buffers.
# Accumulates adjacent text fragments together with their total display
# width. Internal buffer element of PrettyPrint.
class Text # :nodoc:
  # Running sum of the widths passed to #add.
  attr_reader :width

  def initialize
    @fragments = []
    @width = 0
  end

  # Appends every buffered fragment to +out+ and returns the advanced
  # column position (+output_width+ plus this node's width).
  def output(out, output_width)
    @fragments.each { |fragment| out << fragment }
    output_width + @width
  end

  # Buffers +obj+ and grows the accumulated width by +width+.
  def add(obj, width)
    @fragments << obj
    @width += width
  end
end
# The Breakable class is used for breaking up object information
#
# This class is intended for internal use of the PrettyPrint buffers.
class Breakable # :nodoc:
  # Create a new Breakable object.
  #
  # Arguments:
  # * +sep+ String of the separator
  # * +width+ Integer width of the +sep+
  # * +q+ parent PrettyPrint object, to base from
  def initialize(sep, width, q)
    @obj = sep
    @width = width
    @pp = q
    # Snapshot the indentation at creation time; it is used if this
    # breakable later turns into a newline.
    @indent = q.indent
    # Register with the innermost open group so the group can decide to
    # break all of its breakables together.
    @group = q.current_group
    @group.breakables.push self
  end

  # Holds the separator String
  #
  # The +sep+ argument from ::new
  attr_reader :obj

  # The width of +obj+ / +sep+
  attr_reader :width

  # The number of spaces to indent.
  #
  # This is inferred from +q+ within PrettyPrint, passed in ::new
  attr_reader :indent

  # Render the String text of the objects that have been added to this
  # Breakable object.
  #
  # Output the text to +out+, and increment the width to +output_width+
  def output(out, output_width)
    @group.breakables.shift
    if @group.break?
      # The group decided to break: emit a newline plus indentation
      # instead of the separator; the new column is the indent width.
      out << @pp.newline
      out << @pp.genspace.call(@indent)
      @indent
    else
      # The group fits: emit the separator. Once its last breakable is
      # consumed the group can no longer break, so drop it from the queue.
      @pp.group_queue.delete @group if @group.breakables.empty?
      out << @obj
      output_width + @width
    end
  end
end
# The Group class is used for making indentation easier.
#
# While this class does neither the breaking into newlines nor indentation,
# it is used in a stack (as well as a queue) within PrettyPrint, to group
# objects.
#
# For information on using groups, see PrettyPrint#group
#
# This class is intended for internal use of the PrettyPrint buffers.
# Tracks one nesting level of grouped output: its depth, its pending
# Breakable objects, and whether the group has committed to breaking.
class Group # :nodoc:
  # Nesting depth relative to enclosing groups.
  attr_reader :depth
  # Pending Breakable objects belonging to this group.
  attr_reader :breakables

  def initialize(depth)
    @depth = depth
    @breakables = []
    @break = false
  end

  # Commits the group to breaking; all its breakables become newlines.
  def break
    @break = true
  end

  # Whether #break has been called on this group.
  def break?
    @break
  end

  # Answers true exactly once - on the first call. `defined?` is used so
  # no state exists until the question is first asked.
  def first?
    return false if defined?(@first)
    @first = false
    true
  end
end
# The GroupQueue class is used for managing the queue of Group to be pretty
# printed.
#
# This queue groups the Group objects, based on their depth.
#
# This class is intended for internal use of the PrettyPrint buffers.
class GroupQueue # :nodoc:
  # Create a GroupQueue object
  #
  # Arguments:
  # * +groups+ - one or more PrettyPrint::Group objects
  def initialize(*groups)
    # @queue[d] holds the groups at depth d, in insertion order.
    @queue = []
    groups.each {|g| enq g}
  end

  # Enqueue +group+
  #
  # This does not strictly append the group to the end of the queue,
  # but instead adds it in line, base on the +group.depth+
  def enq(group)
    depth = group.depth
    # Grow the per-depth buckets as needed before inserting.
    @queue << [] until depth < @queue.length
    @queue[depth] << group
  end

  # Returns the outer group of the queue
  def deq
    @queue.each {|gs|
      # Within a depth, scan newest-first for a group that still has
      # pending breakables; that one is removed, broken and returned.
      (gs.length-1).downto(0) {|i|
        unless gs[i].breakables.empty?
          group = gs.slice!(i, 1).first
          group.break
          return group
        end
      }
      # No breakable group at this depth: break and discard them all,
      # then continue with the next (deeper) level.
      gs.each {|group| group.break}
      gs.clear
    }
    return nil
  end

  # Remote +group+ from this queue
  def delete(group)
    @queue[group.depth].delete(group)
  end
end
# PrettyPrint::SingleLine is used by PrettyPrint.singleline_format
#
# It is passed to be similar to a PrettyPrint object itself, by responding to:
# * #text
# * #breakable
# * #nest
# * #group
# * #flush
# * #first?
#
# but instead, the output has no line breaks
#
# Drop-in stand-in for PrettyPrint used by PrettyPrint.singleline_format:
# it answers the same API but never emits line breaks or indentation.
class SingleLine
  # +output+ - String (or similar) to store rendered text; must respond
  #            to <<.
  # +maxwidth+, +newline+ - accepted only for interface compatibility;
  #            both are ignored.
  def initialize(output, maxwidth=nil, newline=nil)
    @output = output
    @first = [true]
  end

  # Appends +obj+ verbatim. +width+ is ignored.
  def text(obj, width=nil)
    @output << obj
  end

  # A "breakable" point never breaks on a single line; just emit +sep+.
  def breakable(sep=' ', width=nil)
    @output << sep
  end

  # Indentation is meaningless on a single line; simply run the block.
  def nest(indent) # :nodoc:
    yield
  end

  # Emits +open_obj+, runs the block, then emits +close_obj+. The indent
  # and width arguments are ignored; a fresh first? flag is scoped to the
  # block.
  def group(indent=nil, open_obj='', close_obj='', open_width=nil, close_width=nil)
    @first.push true
    @output << open_obj
    yield
    @output << close_obj
    @first.pop
  end

  # Nothing is ever buffered, so flushing is a no-op.
  def flush # :nodoc:
  end

  # True only on the first call within the current group.
  def first?
    @first[-1].tap { @first[-1] = false }
  end
end
end
| 29.104425 | 456 | 0.639686 |
f8be2baf0b6da66b34eb08029014b3f71d3777c2 | 1,443 | module Vagrant
module Util
  # Utility classes are lazily loaded: each constant below is resolved on
  # first reference via Kernel#autoload, keeping boot time down.
  autoload :Busy,                  'vagrant/util/busy'
  autoload :CheckpointClient,      'vagrant/util/checkpoint_client'
  autoload :CommandDeprecation,    'vagrant/util/command_deprecation'
  autoload :Counter,               'vagrant/util/counter'
  autoload :CredentialScrubber,    'vagrant/util/credential_scrubber'
  autoload :DeepMerge,             'vagrant/util/deep_merge'
  autoload :Env,                   'vagrant/util/env'
  autoload :Experimental,          'vagrant/util/experimental'
  autoload :HashWithIndifferentAccess, 'vagrant/util/hash_with_indifferent_access'
  autoload :GuestInspection,       'vagrant/util/guest_inspection'
  autoload :LoggingFormatter,      'vagrant/util/logging_formatter'
  autoload :Platform,              'vagrant/util/platform'
  autoload :Retryable,             'vagrant/util/retryable'
  autoload :SafeExec,              'vagrant/util/safe_exec'
  autoload :SilenceWarnings,       'vagrant/util/silence_warnings'
  autoload :StackedProcRunner,     'vagrant/util/stacked_proc_runner'
  autoload :StringBlockEditor,     'vagrant/util/string_block_editor'
  autoload :Subprocess,            'vagrant/util/subprocess'
  autoload :TemplateRenderer,      'vagrant/util/template_renderer'
  autoload :Which,                 'vagrant/util/which'
end
end
| 57.72 | 84 | 0.638254 |
4a98c9fc971bb193a699759dbe1bae0f65398191 | 4,388 | module Rubinius
module FFI
module Generators
##
# Constants turns C constants into ruby values.
class Constants
# Value object describing a single C constant to probe: how to print it
# (+format+ / +cast+), what to call it in Ruby, and an optional converter
# applied to the textual value captured from the probe program.
class Constant
  attr_reader :name, :format, :cast
  attr_accessor :value

  def initialize(name, format, cast, ruby_name=nil, converter=nil)
    @name      = name
    @format    = format
    @cast      = cast
    @ruby_name = ruby_name
    @converter = converter
    @value     = nil
  end

  # True once a value has been captured from the probe program's output.
  def value?
    !@value.nil?
  end

  # The captured value, run through the converter when one was supplied.
  def converted_value
    if @converter
      @converter.call(@value)
    else
      @value
    end
  end

  # Ruby-side constant name; falls back to the C name.
  def ruby_name
    @ruby_name || @name
  end

  # Renders the assignment line emitted into generated Ruby source.
  def to_ruby
    "#{ruby_name} = #{converted_value}"
  end
end
# Hash of constant name (String) => Constant, in registration order.
attr_reader :constants

##
# Creates a new constant generator that uses +prefix+ as a name, and an
# options hash.
#
# The only option is :required, which if set to true raises an error if a
# constant you have requested was not found.
#
# When passed a block, #calculate is automatically called at the end of
# the block, otherwise you must call it yourself.

def initialize(prefix=nil, options={})
  @includes = []
  @include_dirs = []
  @constants = {}
  @prefix = prefix
  @platform = Platform.new
  @required = options[:required]

  if block_given?
    yield self
    calculate
  end
end

# Looks up the captured value for constant +name+ (a String).
def [](name)
  @constants[name].value
end
##
# Request the value for C constant +name+. +format+ is a printf format
# string to print the value out, and +cast+ is a C cast for the value.
# +ruby_name+ allows you to give the constant an alternate ruby name for
# #to_ruby. +converter+ or +converter_proc+ allow you to convert the
# value from a string to the appropriate type for #to_ruby.
# Registers C constant +name+ for probing. +format+ is a printf format
# string (default '%ld') and +cast+ a C cast (default '(long)').
# +ruby_name+ overrides the generated Ruby constant name; the converter
# (argument or block, never both) transforms the captured string value.
# Returns the new Constant.
#
# Fix: use `&&` instead of `and` in the condition - `and` binds looser
# than assignment/argument forms and is a known precedence trap; drop the
# redundant explicit `return`.
def const(name, format=nil, cast=nil, ruby_name=nil, converter=nil, &block)
  format ||= '%ld'
  cast ||= '(long)'

  if block && converter
    raise ArgumentError, "Supply only converter or converter block"
  end

  converter = block if converter.nil?
  const = Constant.new name, format, cast, ruby_name, converter
  @constants[name.to_s] = const
  const
end
# Writes the C probe program to +io+: one printf per registered constant,
# each wrapped in #ifdef so undefined constants are silently skipped at
# compile time (they then show up as missing in #calculate).
def source(io)
  io.puts "#include <stdio.h>"

  @includes.each do |inc|
    io.puts "#include <#{inc}>"
  end

  io.puts "#include <stddef.h>\n\n"
  io.puts "int main(int argc, char **argv)\n{"

  @constants.each_value do |const|
    io.puts <<-EOF
#ifdef #{const.name}
printf("#{const.name} #{const.format}\\n", #{const.cast}#{const.name});
#endif
    EOF
  end

  io.puts "\n\treturn 0;\n}"
end

# Compiles the generated probe source +name+ into executable +target+.
def prepare(name, target)
  @platform.compile(@include_dirs, name, target)
end

# Error message used when compiling the probe program fails.
def prepare_failed
  "Compilation error generating constants #{@prefix}"
end

# The probe binary needs no post-processing; returns +target+ unchanged.
def process(target)
  target
end

# Error message used when running the probe program fails.
def process_failed
  "Error generating constants #{@prefix}"
end
# Compiles and runs the probe program (via BodyGuard), then parses each
# "NAME value" line of its output back into the matching Constant.
def calculate
  output = BodyGuard.new(self, @prefix, @platform).perform

  output.each_line do |line|
    # Capture the constant name ($1) and everything after the first
    # whitespace as its value ($2).
    line =~ /^(\S+)\s(.*)$/
    const = @constants[$1]
    const.value = $2
  end

  # Constants that never printed, e.g. not defined on this platform.
  missing_constants = @constants.reject { |_, c| c.value? }.keys

  if @required and not missing_constants.empty?
    raise "Missing required constants for #{@prefix}: #{missing_constants.join ', '}"
  end
end

# Writes "<prefix>.NAME = value" lines to +io+ for every constant.
def write_constants(io)
  @constants.each do |name, constant|
    io.print @prefix, "."
    io.puts constant.to_ruby
  end
end
##
# Outputs values for discovered constants, sorted by name. If a
# constant's value was not discovered, a "# NAME not defined" comment is
# emitted in its place.
def to_ruby
  @constants.sort_by { |name,| name }.map do |name, constant|
    if constant.value?
      constant.to_ruby
    else
      "# #{name} not defined"
    end
  end.join "\n"
end

# Adds a header file to #include in the generated probe program.
def include(i)
  @includes << i
end

# Adds a directory to the compiler's include search path.
def include_dir(i)
  @include_dirs << i
end
end
end
ConstGenerator = Generators::Constants
end
end
| 24.243094 | 91 | 0.559253 |
ff47c5620f7587bf665f30cc0dfc478f15e692ca | 105 | # frozen_string_literal: true
module Firebug
# The current version of Firebug
VERSION = '1.4.2'
end
| 15 | 34 | 0.733333 |
e2930ece038da26d5c595d6bb9b1903533953845 | 1,953 | # frozen_string_literal: true
require 'spec_helper'
RSpec.describe Mutations::ComplianceManagement::Frameworks::Update do
include GraphqlHelpers
let_it_be(:framework) { create(:compliance_framework) }
let(:user) { framework.namespace.owner }
let(:mutation) { described_class.new(object: nil, context: { current_user: user }, field: nil) }
let(:params) do
{
name: 'New Name',
description: 'New Description',
color: '#AAAAA1'
}
end
subject { mutation.resolve(id: global_id_of(framework), params: params) }
context 'feature is licensed' do
before do
stub_licensed_features(custom_compliance_frameworks: true)
end
context 'parameters are valid' do
it 'returns the new object' do
response = subject[:compliance_framework]
expect(response.name).to eq('New Name')
expect(response.description).to eq('New Description')
expect(response.color).to eq('#AAAAA1')
end
it 'returns no errors' do
expect(subject[:errors]).to be_empty
end
context 'current_user is not authorized to update framework' do
let_it_be(:user) { create(:user) }
it 'raises an error' do
expect { subject }.to raise_error(Gitlab::Graphql::Errors::ResourceNotAvailable)
end
end
end
context 'parameters are invalid' do
let(:params) do
{
name: '',
description: '',
color: 'AAAAA1'
}
end
it 'does not change the framework attributes' do
expect { subject }.not_to change { framework.name }
expect { subject }.not_to change { framework.description }
expect { subject }.not_to change { framework.color }
end
it 'returns validation errors' do
expect(subject[:errors]).to contain_exactly("Name can't be blank", "Description can't be blank", "Color must be a valid color code")
end
end
end
end
| 27.9 | 140 | 0.642089 |
18da06bdf09295b9395de32713d6adfbdff01db9 | 1,613 | require 'puppet'
require 'puppet/type/tempest_config'
describe 'Puppet::Type.type(:tempest_config)' do
before :each do
@tempest_config = Puppet::Type.type(:tempest_config).new(:name => 'DEFAULT/foo', :value => 'bar')
end
it 'should require a name' do
expect {
Puppet::Type.type(:tempest_config).new({})
}.to raise_error(Puppet::Error, 'Title or name must be provided')
end
it 'should not expect a name with whitespace' do
expect {
Puppet::Type.type(:tempest_config).new(:name => 'f oo')
}.to raise_error(Puppet::Error, /Invalid value "f oo"/)
end
it 'should fail when there is no section' do
expect {
Puppet::Type.type(:tempest_config).new(:name => 'foo')
}.to raise_error(Puppet::Error, /Invalid value "foo"/)
end
it 'should not require a value when ensure is absent' do
Puppet::Type.type(:tempest_config).new(:name => 'DEFAULT/foo', :ensure => :absent)
end
it 'should accept a valid value' do
@tempest_config[:value] = 'bar'
expect(@tempest_config[:value]).to eq('bar')
end
it 'should not accept a value with whitespace' do
@tempest_config[:value] = 'b ar'
expect(@tempest_config[:value]).to eq('b ar')
end
it 'should accept valid ensure values' do
@tempest_config[:ensure] = :present
expect(@tempest_config[:ensure]).to eq(:present)
@tempest_config[:ensure] = :absent
expect(@tempest_config[:ensure]).to eq(:absent)
end
it 'should not accept invalid ensure values' do
expect {
@tempest_config[:ensure] = :latest
}.to raise_error(Puppet::Error, /Invalid value/)
end
end
| 29.87037 | 101 | 0.6677 |
ff327eb77384e3b05a344c14136d1ddb2eef606f | 10,095 | # frozen_string_literal: true
# == Schema Information
#
# Table name: projects
#
# bugfix_fork_criteria :string default([]), not null, is an Array
# bugfix_fork_of :string
# description :text
# description_tsvector :tsvector
# github_repo_path :citext
# is_bugfix_fork :boolean default(FALSE), not null
# permalink :string not null, primary key
# permalink_tsvector :tsvector
# rubygem_name :string
# score :decimal(5, 2)
# created_at :datetime not null
# updated_at :datetime not null
#
# Indexes
#
# index_projects_on_bugfix_fork_of (bugfix_fork_of)
# index_projects_on_description_tsvector (description_tsvector) USING gin
# index_projects_on_is_bugfix_fork (is_bugfix_fork)
# index_projects_on_permalink (permalink) UNIQUE
# index_projects_on_permalink_tsvector (permalink_tsvector) USING gin
# index_projects_on_rubygem_name (rubygem_name) UNIQUE
#
# Foreign Keys
#
# fk_rails_... (rubygem_name => rubygems.name)
#
require "rails_helper"
RSpec.describe Project, type: :model do
it "does not allow mismatches between permalink and rubygem name" do
project = described_class.create! permalink: "simplecov"
expect { project.update! rubygem_name: "rails" }.to raise_error(
ActiveRecord::StatementInvalid,
/check_project_permalink_and_rubygem_name_parity/
)
end
describe ".includes_associations" do
before do
rand(3..14).times { |i| Factories.project "project #{i}" }
end
it "only makes expected amount of queries" do
nested_accessor = ->(p) { [p.categories.map(&:name), p.rubygem_downloads, p.github_repo_stargazers_count] }
# Sometimes activerecord sprinkles in a `SELECT a.attname, format_type(a.atttypid, a.atttypmod),`
# here for good measure. Actually it's supposed to be 4 queries.
expect { described_class.includes_associations.map(&nested_accessor) }
.to make_database_queries(count: 4..6)
end
end
describe ".find_for_show!" do
let(:project) do
Factories.project "sample"
end
it "eager-loads readme if present" do
project.github_repo.create_readme! html: "hello world", etag: "1234"
found_instance = described_class.find_for_show!(project.permalink)
expect { found_instance.github_repo_readme }.not_to make_database_queries
end
end
describe ".with_bugfix_forks" do
before do
Factories.project "regular"
Factories.project("forked").tap { |p| p.update! is_bugfix_fork: true }
end
it "omits bugfix_forks when given false" do
expect(described_class.with_bugfix_forks(false).pluck(:permalink)).to be == %w[regular]
end
it "includes bugfix_forks when given true" do
scope = described_class.with_bugfix_forks(true).order(permalink: :asc)
expect(scope.pluck(:permalink)).to be == %w[forked regular]
end
end
describe ".suggest" do
it "does not make any database query for empty param" do
expect { described_class.suggest(" ") }.not_to make_database_queries
end
it "returns empty array for empty param" do
expect(described_class.suggest(" ")).to be == []
end
it "fetches projects from database that match given name ordered by score" do
Factories.project "demofoo"
Factories.project "foobar", score: 10
Factories.project "foo", score: 5
Factories.project "foofoo", score: nil
expect(described_class.suggest("fo")).to be == %w[foobar foo foofoo]
end
it "is case-insensitive" do
Factories.project "DeMo"
expect(described_class.suggest("dem")).to be == %w[DeMo]
end
it "sanitizes user-provided special chars" do
Factories.project "foof"
expect(described_class.suggest("%oof")).to be == %w[]
end
end
describe ".search" do
it "can find a matching project" do
expected = described_class.create! permalink: "widgets", score: 1
described_class.create! permalink: "airplanes", score: 1
described_class.create! permalink: "rockets", score: 1
expect(described_class.search("widget")).to be == [expected]
end
it "does not return projects without a score" do
expected = described_class.create! permalink: "somethingelse", score: 1, description: "Provides amazing widgets"
described_class.create! permalink: "widgets"
expect(described_class.search("widget")).to be == [expected]
end
describe "for projects flagged as bugfix forks" do
let(:expected) do
described_class.create! permalink: "somethingelse", score: 10, description: "Provides amazing widgets"
end
before do
described_class.create! permalink: "widgets", is_bugfix_fork: true, score: 1
end
it "does not include them by default" do
expect(described_class.search("widget")).to be == [expected]
end
it "includes them when called with show_forks true" do
expect(described_class.search("widget", show_forks: true)).to be == [expected, described_class.find("widgets")]
end
end
describe "result order" do
before do
(1..3).each do |i|
rubygem = Rubygem.create! name: "widgets#{i}", downloads: 10 - i, current_version: "1.0"
described_class.create! permalink: rubygem.name, score: 10 + i, rubygem: rubygem
end
end
it "sorts results by the search result rank by default" do
described_class.find("widgets2").update! description: "widgets widgets!"
expected = %w[widgets2 widgets3 widgets1]
expect(described_class.search("widget").pluck(:permalink)).to be == expected
end
it "allows to pass a custom order instance" do
order = Project::Order.new(order: "rubygem_downloads")
expected = %w[widgets1 widgets2 widgets3]
expect(described_class.search("widget", order: order).pluck(:permalink)).to be == expected
end
end
end
describe "#github_only?" do
it "is false when no / is present in permalink" do
expect(described_class.new(permalink: "foobar")).not_to be_github_only
end
it "is true when a / is present in permalink" do
expect(described_class.new(permalink: "foo/bar")).to be_github_only
end
end
describe "#github_repo_path=" do
it "normalizes the path to the stripped, downcase variant" do
expect(described_class.new(github_repo_path: " FoO/BaR ").github_repo_path).to be == "foo/bar"
end
end
describe "url delegation" do
%i[changelog_url documentation_url mailing_list_url].each do |url|
describe "##{url}" do
it "is fetched from the rubygem" do
project = described_class.new(rubygem: Rubygem.new(url => "foobar"))
expect(project.send(url)).to be == "foobar"
end
end
end
describe "#source_code_url" do
let(:project) do
described_class.new(
rubygem: Rubygem.new(source_code_url: "from_gem"),
github_repo: GithubRepo.new(path: "foo/bar")
)
end
it "prefers the gem's source code url" do
expect(project.source_code_url).to be == project.rubygem_source_code_url
end
it "falls back to github repo url if not given in gem" do
project.rubygem.source_code_url = nil
expect(project.source_code_url).to be == project.github_repo_url
end
end
describe "#homepage_url" do
let(:project) do
described_class.new(
rubygem: Rubygem.new(homepage_url: "from_gem"),
github_repo: GithubRepo.new(homepage_url: "from_repo")
)
end
it "prefers the gem's homepage url" do
expect(project.homepage_url).to be == project.rubygem_homepage_url
end
it "falls back to github repo homepage url if not given in gem" do
project.rubygem.homepage_url = nil
expect(project.homepage_url).to be == project.github_repo_homepage_url
end
end
describe "#wiki_url" do
let(:project) do
described_class.new(
rubygem: Rubygem.new(wiki_url: "from_gem"),
github_repo: GithubRepo.new(path: "foo/bar")
)
end
it "prefers the gem's wiki url" do
expect(project.wiki_url).to be == project.rubygem_wiki_url
end
it "falls back to github repo wiki url if not given in gem" do
project.rubygem.wiki_url = nil
expect(project.wiki_url).to be == project.github_repo_wiki_url
end
end
describe "#bug_tracker_url" do
let(:project) do
described_class.new(
rubygem: Rubygem.new(bug_tracker_url: "from_gem"),
github_repo: GithubRepo.new(path: "foo/bar")
)
end
it "prefers the gem's bug_tracker_url url" do
expect(project.bug_tracker_url).to be == project.rubygem_bug_tracker_url
end
it "falls back to github repo issues url if not given in gem" do
project.rubygem.bug_tracker_url = nil
expect(project.bug_tracker_url).to be == project.github_repo_issues_url
end
end
end
describe "permalink=" do
it "normalizes the permalink to the stripped, downcase variant for github repo" do
expect(described_class.new(permalink: " FoO/BaR ").permalink).to be == "foo/bar"
end
it "does not normalize the permalink for non-github project" do
expect(described_class.new(permalink: "FoOBaR").permalink).to be == "FoOBaR"
end
end
describe "#health" do
let(:project) { described_class.new }
it "passes itself to Project Health" do
expect(Project::Health).to receive(:new).with(project)
project.health
end
it "returns a project health instance" do
health = instance_double Project::Health
allow(Project::Health).to receive(:new).and_return(health)
expect(project.health).to be == health
end
it "memoizes the instance" do
expect(project.health.object_id).to be == project.health.object_id
end
end
end
| 33.762542 | 119 | 0.666667 |
01b9a6c9cd92e1de364c635d1444810bafe32dbd | 1,133 | # WARNING ABOUT GENERATED CODE
#
# This file is generated. See the contributing guide for more information:
# https://github.com/aws/aws-sdk-ruby/blob/master/CONTRIBUTING.md
#
# WARNING ABOUT GENERATED CODE
Gem::Specification.new do |spec|
spec.name = 'aws-sdk-lex'
spec.version = File.read(File.expand_path('../VERSION', __FILE__)).strip
spec.summary = 'AWS SDK for Ruby - Amazon Lex Runtime Service'
spec.description = 'Official AWS Ruby gem for Amazon Lex Runtime Service. This gem is part of the AWS SDK for Ruby.'
spec.author = 'Amazon Web Services'
spec.homepage = 'http://github.com/aws/aws-sdk-ruby'
spec.license = 'Apache-2.0'
spec.email = ['[email protected]']
spec.require_paths = ['lib']
spec.files = Dir['lib/**/*.rb']
spec.metadata = {
'source_code_uri' => 'https://github.com/aws/aws-sdk-ruby/tree/master/gems/aws-sdk-lex',
'changelog_uri' => 'https://github.com/aws/aws-sdk-ruby/tree/master/gems/aws-sdk-lex/CHANGELOG.md'
}
spec.add_dependency('aws-sdk-core', '~> 3')
spec.add_dependency('aws-sigv4', '~> 1.0')
end
| 37.766667 | 120 | 0.663725 |
0355fa49c5dab848369a90c6da0e419343f6e640 | 473 | require "openssl"
class String
def encrypt(key)
cipher = OpenSSL::Cipher::AES.new(256, :CBC).encrypt
cipher.key = key
cipher.update(self) + cipher.final
end
def decrypt(key)
cipher = OpenSSL::Cipher::AES.new(256, :CBC).decrypt
cipher.key = key
cipher.update(self) + cipher.final
end
end
password = "password"
message = "String"
key = Digest::SHA256.digest(password)
message = message.encrypt(key)
puts message
puts message.decrypt(key)
| 18.92 | 56 | 0.693446 |
bf4f613e82e5d96e7a46998e7edc70b4f61d3689 | 38 | class Device < ActiveRecord::Base
end
| 12.666667 | 33 | 0.789474 |
031584802ac0df7b1e66104085782b1d34f9ff30 | 207 | require 'adhearsion'
require 'amequp'
RSpec.configure do |config|
config.color_enabled = true
config.tty = true
config.filter_run :focus => true
config.run_all_when_everything_filtered = true
end
| 17.25 | 48 | 0.763285 |
4aaf7eca9d300922192124e2df5d78ee8e32f52c | 16,220 | require 'abstract_unit'
require 'controller/fake_models'
require 'pathname'
class TestControllerWithExtraEtags < ActionController::Base
etag { nil }
etag { 'ab' }
etag { :cde }
etag { [:f] }
etag { nil }
def fresh
render text: "stale" if stale?(etag: '123', template: false)
end
def array
render text: "stale" if stale?(etag: %w(1 2 3), template: false)
end
def with_template
if stale? template: 'test/hello_world'
render text: 'stale'
end
end
end
class TestController < ActionController::Base
protect_from_forgery
before_action :set_variable_for_layout
class LabellingFormBuilder < ActionView::Helpers::FormBuilder
end
layout :determine_layout
def name
nil
end
private :name
helper_method :name
def hello_world
end
def conditional_hello
if stale?(:last_modified => Time.now.utc.beginning_of_day, :etag => [:foo, 123])
render :action => 'hello_world'
end
end
def conditional_hello_with_record
record = Struct.new(:updated_at, :cache_key).new(Time.now.utc.beginning_of_day, "foo/123")
if stale?(record)
render :action => 'hello_world'
end
end
def conditional_hello_with_public_header
if stale?(:last_modified => Time.now.utc.beginning_of_day, :etag => [:foo, 123], :public => true)
render :action => 'hello_world'
end
end
def conditional_hello_with_public_header_with_record
record = Struct.new(:updated_at, :cache_key).new(Time.now.utc.beginning_of_day, "foo/123")
if stale?(record, :public => true)
render :action => 'hello_world'
end
end
def conditional_hello_with_public_header_and_expires_at
expires_in 1.minute
if stale?(:last_modified => Time.now.utc.beginning_of_day, :etag => [:foo, 123], :public => true)
render :action => 'hello_world'
end
end
def conditional_hello_with_expires_in
expires_in 60.1.seconds
render :action => 'hello_world'
end
def conditional_hello_with_expires_in_with_public
expires_in 1.minute, :public => true
render :action => 'hello_world'
end
def conditional_hello_with_expires_in_with_must_revalidate
expires_in 1.minute, :must_revalidate => true
render :action => 'hello_world'
end
def conditional_hello_with_expires_in_with_public_and_must_revalidate
expires_in 1.minute, :public => true, :must_revalidate => true
render :action => 'hello_world'
end
def conditional_hello_with_expires_in_with_public_with_more_keys
expires_in 1.minute, :public => true, 's-maxage' => 5.hours
render :action => 'hello_world'
end
def conditional_hello_with_expires_in_with_public_with_more_keys_old_syntax
expires_in 1.minute, :public => true, :private => nil, 's-maxage' => 5.hours
render :action => 'hello_world'
end
def conditional_hello_with_expires_now
expires_now
render :action => 'hello_world'
end
def conditional_hello_with_cache_control_headers
response.headers['Cache-Control'] = 'no-transform'
expires_now
render :action => 'hello_world'
end
def conditional_hello_with_bangs
render :action => 'hello_world'
end
before_action :handle_last_modified_and_etags, :only=>:conditional_hello_with_bangs
def handle_last_modified_and_etags
fresh_when(:last_modified => Time.now.utc.beginning_of_day, :etag => [ :foo, 123 ])
end
def heading
head :ok
end
# :ported:
def double_render
render :text => "hello"
render :text => "world"
end
def double_redirect
redirect_to :action => "double_render"
redirect_to :action => "double_render"
end
def render_and_redirect
render :text => "hello"
redirect_to :action => "double_render"
end
def render_to_string_and_render
@stuff = render_to_string :text => "here is some cached stuff"
render :text => "Hi web users! #{@stuff}"
end
def render_to_string_with_inline_and_render
render_to_string :inline => "<%= 'dlrow olleh'.reverse %>"
render :template => "test/hello_world"
end
def rendering_with_conflicting_local_vars
@name = "David"
render :action => "potential_conflicts"
end
def hello_world_from_rxml_using_action
render :action => "hello_world_from_rxml", :handlers => [:builder]
end
# :deprecated:
def hello_world_from_rxml_using_template
render :template => "test/hello_world_from_rxml", :handlers => [:builder]
end
def head_created
head :created
end
def head_created_with_application_json_content_type
head :created, :content_type => "application/json"
end
def head_ok_with_image_png_content_type
head :ok, :content_type => "image/png"
end
def head_with_location_header
head :location => "/foo"
end
def head_with_location_object
head :location => Customer.new("david", 1)
end
def head_with_symbolic_status
head :status => params[:status].intern
end
def head_with_integer_status
head :status => params[:status].to_i
end
def head_with_string_status
head :status => params[:status]
end
def head_with_custom_header
head :x_custom_header => "something"
end
def head_with_www_authenticate_header
head 'WWW-Authenticate' => 'something'
end
def head_with_status_code_first
head :forbidden, :x_custom_header => "something"
end
def head_and_return
head :ok and return
raise 'should not reach this line'
end
def head_with_no_content
# Fill in the headers with dummy data to make
# sure they get removed during the testing
response.headers["Content-Type"] = "dummy"
response.headers["Content-Length"] = 42
head 204
end
private
def set_variable_for_layout
@variable_for_layout = nil
end
def determine_layout
case action_name
when "hello_world", "layout_test", "rendering_without_layout",
"rendering_nothing_on_layout", "render_text_hello_world",
"render_text_hello_world_with_layout",
"hello_world_with_layout_false",
"partial_only", "accessing_params_in_template",
"accessing_params_in_template_with_layout",
"render_with_explicit_template",
"render_with_explicit_string_template",
"update_page", "update_page_with_instance_variables"
"layouts/standard"
when "action_talk_to_layout", "layout_overriding_layout"
"layouts/talk_from_action"
when "render_implicit_html_template_from_xhr_request"
(request.xhr? ? 'layouts/xhr' : 'layouts/standard')
end
end
end
class MetalTestController < ActionController::Metal
include AbstractController::Rendering
include ActionView::Rendering
include ActionController::Rendering
include ActionController::RackDelegation
def accessing_logger_in_template
render :inline => "<%= logger.class %>"
end
end
class ExpiresInRenderTest < ActionController::TestCase
tests TestController
def test_expires_in_header
get :conditional_hello_with_expires_in
assert_equal "max-age=60, private", @response.headers["Cache-Control"]
end
def test_expires_in_header_with_public
get :conditional_hello_with_expires_in_with_public
assert_equal "max-age=60, public", @response.headers["Cache-Control"]
end
def test_expires_in_header_with_must_revalidate
get :conditional_hello_with_expires_in_with_must_revalidate
assert_equal "max-age=60, private, must-revalidate", @response.headers["Cache-Control"]
end
def test_expires_in_header_with_public_and_must_revalidate
get :conditional_hello_with_expires_in_with_public_and_must_revalidate
assert_equal "max-age=60, public, must-revalidate", @response.headers["Cache-Control"]
end
def test_expires_in_header_with_additional_headers
get :conditional_hello_with_expires_in_with_public_with_more_keys
assert_equal "max-age=60, public, s-maxage=18000", @response.headers["Cache-Control"]
end
def test_expires_in_old_syntax
get :conditional_hello_with_expires_in_with_public_with_more_keys_old_syntax
assert_equal "max-age=60, public, s-maxage=18000", @response.headers["Cache-Control"]
end
def test_expires_now
get :conditional_hello_with_expires_now
assert_equal "no-cache", @response.headers["Cache-Control"]
end
def test_expires_now_with_cache_control_headers
get :conditional_hello_with_cache_control_headers
assert_match(/no-cache/, @response.headers["Cache-Control"])
assert_match(/no-transform/, @response.headers["Cache-Control"])
end
def test_date_header_when_expires_in
time = Time.mktime(2011,10,30)
Time.stubs(:now).returns(time)
get :conditional_hello_with_expires_in
assert_equal Time.now.httpdate, @response.headers["Date"]
end
end
class LastModifiedRenderTest < ActionController::TestCase
tests TestController
def setup
super
@last_modified = Time.now.utc.beginning_of_day.httpdate
end
def test_responds_with_last_modified
get :conditional_hello
assert_equal @last_modified, @response.headers['Last-Modified']
end
def test_request_not_modified
@request.if_modified_since = @last_modified
get :conditional_hello
assert_equal 304, @response.status.to_i
assert @response.body.blank?
assert_equal @last_modified, @response.headers['Last-Modified']
end
def test_request_not_modified_but_etag_differs
@request.if_modified_since = @last_modified
@request.if_none_match = "234"
get :conditional_hello
assert_response :success
end
def test_request_modified
@request.if_modified_since = 'Thu, 16 Jul 2008 00:00:00 GMT'
get :conditional_hello
assert_equal 200, @response.status.to_i
assert @response.body.present?
assert_equal @last_modified, @response.headers['Last-Modified']
end
def test_responds_with_last_modified_with_record
get :conditional_hello_with_record
assert_equal @last_modified, @response.headers['Last-Modified']
end
def test_request_not_modified_with_record
@request.if_modified_since = @last_modified
get :conditional_hello_with_record
assert_equal 304, @response.status.to_i
assert @response.body.blank?
assert_equal @last_modified, @response.headers['Last-Modified']
end
def test_request_not_modified_but_etag_differs_with_record
@request.if_modified_since = @last_modified
@request.if_none_match = "234"
get :conditional_hello_with_record
assert_response :success
end
def test_request_modified_with_record
@request.if_modified_since = 'Thu, 16 Jul 2008 00:00:00 GMT'
get :conditional_hello_with_record
assert_equal 200, @response.status.to_i
assert @response.body.present?
assert_equal @last_modified, @response.headers['Last-Modified']
end
def test_request_with_bang_gets_last_modified
get :conditional_hello_with_bangs
assert_equal @last_modified, @response.headers['Last-Modified']
assert_response :success
end
def test_request_with_bang_obeys_last_modified
@request.if_modified_since = @last_modified
get :conditional_hello_with_bangs
assert_response :not_modified
end
def test_last_modified_works_with_less_than_too
@request.if_modified_since = 5.years.ago.httpdate
get :conditional_hello_with_bangs
assert_response :success
end
end
class EtagRenderTest < ActionController::TestCase
tests TestControllerWithExtraEtags
def test_multiple_etags
@request.if_none_match = etag(["123", 'ab', :cde, [:f]])
get :fresh
assert_response :not_modified
@request.if_none_match = %("nomatch")
get :fresh
assert_response :success
end
def test_array
@request.if_none_match = etag([%w(1 2 3), 'ab', :cde, [:f]])
get :array
assert_response :not_modified
@request.if_none_match = %("nomatch")
get :array
assert_response :success
end
def test_etag_reflects_template_digest
get :with_template
assert_response :ok
assert_not_nil etag = @response.etag
request.if_none_match = etag
get :with_template
assert_response :not_modified
# Modify the template digest
path = File.expand_path('../../fixtures/test/hello_world.erb', __FILE__)
old = File.read(path)
begin
File.write path, 'foo'
ActionView::Digestor.cache.clear
request.if_none_match = etag
get :with_template
assert_response :ok
assert_not_equal etag, @response.etag
ensure
File.write path, old
end
end
def etag(record)
Digest::MD5.hexdigest(ActiveSupport::Cache.expand_cache_key(record)).inspect
end
end
class MetalRenderTest < ActionController::TestCase
tests MetalTestController
def test_access_to_logger_in_view
get :accessing_logger_in_template
assert_equal "NilClass", @response.body
end
end
class HeadRenderTest < ActionController::TestCase
tests TestController
def setup
@request.host = "www.nextangle.com"
end
def test_head_created
post :head_created
assert @response.body.blank?
assert_response :created
end
def test_head_created_with_application_json_content_type
post :head_created_with_application_json_content_type
assert @response.body.blank?
assert_equal "application/json", @response.header["Content-Type"]
assert_response :created
end
def test_head_ok_with_image_png_content_type
post :head_ok_with_image_png_content_type
assert @response.body.blank?
assert_equal "image/png", @response.header["Content-Type"]
assert_response :ok
end
def test_head_with_location_header
get :head_with_location_header
assert @response.body.blank?
assert_equal "/foo", @response.headers["Location"]
assert_response :ok
end
def test_head_with_location_object
with_routing do |set|
set.draw do
resources :customers
get ':controller/:action'
end
get :head_with_location_object
assert @response.body.blank?
assert_equal "http://www.nextangle.com/customers/1", @response.headers["Location"]
assert_response :ok
end
end
def test_head_with_custom_header
get :head_with_custom_header
assert @response.body.blank?
assert_equal "something", @response.headers["X-Custom-Header"]
assert_response :ok
end
def test_head_with_www_authenticate_header
get :head_with_www_authenticate_header
assert @response.body.blank?
assert_equal "something", @response.headers["WWW-Authenticate"]
assert_response :ok
end
def test_head_with_symbolic_status
get :head_with_symbolic_status, :status => "ok"
assert_equal 200, @response.status
assert_response :ok
get :head_with_symbolic_status, :status => "not_found"
assert_equal 404, @response.status
assert_response :not_found
get :head_with_symbolic_status, :status => "no_content"
assert_equal 204, @response.status
assert [email protected]?('Content-Length')
assert_response :no_content
Rack::Utils::SYMBOL_TO_STATUS_CODE.each do |status, code|
get :head_with_symbolic_status, :status => status.to_s
assert_equal code, @response.response_code
assert_response status
end
end
def test_head_with_integer_status
Rack::Utils::HTTP_STATUS_CODES.each do |code, message|
get :head_with_integer_status, :status => code.to_s
assert_equal message, @response.message
end
end
def test_head_with_no_content
get :head_with_no_content
assert_equal 204, @response.status
assert_nil @response.headers["Content-Type"]
assert_nil @response.headers["Content-Length"]
end
def test_head_with_string_status
get :head_with_string_status, :status => "404 Eat Dirt"
assert_equal 404, @response.response_code
assert_equal "Not Found", @response.message
assert_response :not_found
end
def test_head_with_status_code_first
get :head_with_status_code_first
assert_equal 403, @response.response_code
assert_equal "Forbidden", @response.message
assert_equal "something", @response.headers["X-Custom-Header"]
assert_response :forbidden
end
def test_head_returns_truthy_value
assert_nothing_raised do
get :head_and_return
end
end
end
| 27.445008 | 101 | 0.737608 |
ed1dbe15d65b41e432d42b973ffa8602ceba5c6b | 23,388 | # frozen_string_literal: true
require 'spec_helper'
# Access-control matrix for projects with "internal" visibility: each project
# page is requested once per role (admin, owner, maintainer, developer,
# reporter, guest, plain signed-in user, external user, anonymous visitor)
# via the AccessMatchers `be_allowed_for` / `be_denied_for` matchers.
describe "Internal Project Access" do
  include AccessMatchers
  set(:project) { create(:project, :internal, :repository) }
  # Sanity check: the factory really must produce an internal project,
  # otherwise every expectation below is testing the wrong visibility level.
  describe "Project should be internal" do
    describe '#internal?' do
      subject { project.internal? }
      it { is_expected.to be_truthy }
    end
  end
  describe "GET /:project_path" do
    subject { project_path(project) }
    it { is_expected.to be_allowed_for(:admin) }
    it { is_expected.to be_allowed_for(:owner).of(project) }
    it { is_expected.to be_allowed_for(:maintainer).of(project) }
    it { is_expected.to be_allowed_for(:developer).of(project) }
    it { is_expected.to be_allowed_for(:reporter).of(project) }
    it { is_expected.to be_allowed_for(:guest).of(project) }
    it { is_expected.to be_allowed_for(:user) }
    it { is_expected.to be_denied_for(:external) }
    it { is_expected.to be_denied_for(:visitor) }
  end
  describe "GET /:project_path/-/tree/master" do
    subject { project_tree_path(project, project.repository.root_ref) }
    it { is_expected.to be_allowed_for(:admin) }
    it { is_expected.to be_allowed_for(:owner).of(project) }
    it { is_expected.to be_allowed_for(:maintainer).of(project) }
    it { is_expected.to be_allowed_for(:developer).of(project) }
    it { is_expected.to be_allowed_for(:reporter).of(project) }
    it { is_expected.to be_allowed_for(:guest).of(project) }
    it { is_expected.to be_allowed_for(:user) }
    it { is_expected.to be_denied_for(:external) }
    it { is_expected.to be_denied_for(:visitor) }
  end
  describe "GET /:project_path/-/commits/master" do
    subject { project_commits_path(project, project.repository.root_ref, limit: 1) }
    it { is_expected.to be_allowed_for(:admin) }
    it { is_expected.to be_allowed_for(:owner).of(project) }
    it { is_expected.to be_allowed_for(:maintainer).of(project) }
    it { is_expected.to be_allowed_for(:developer).of(project) }
    it { is_expected.to be_allowed_for(:reporter).of(project) }
    it { is_expected.to be_allowed_for(:guest).of(project) }
    it { is_expected.to be_allowed_for(:user) }
    it { is_expected.to be_denied_for(:external) }
    it { is_expected.to be_denied_for(:visitor) }
  end
  describe "GET /:project_path/-/commit/:sha" do
    subject { project_commit_path(project, project.repository.commit) }
    it { is_expected.to be_allowed_for(:admin) }
    it { is_expected.to be_allowed_for(:owner).of(project) }
    it { is_expected.to be_allowed_for(:maintainer).of(project) }
    it { is_expected.to be_allowed_for(:developer).of(project) }
    it { is_expected.to be_allowed_for(:reporter).of(project) }
    it { is_expected.to be_allowed_for(:guest).of(project) }
    it { is_expected.to be_allowed_for(:user) }
    it { is_expected.to be_denied_for(:external) }
    it { is_expected.to be_denied_for(:visitor) }
  end
  describe "GET /:project_path/-/compare" do
    subject { project_compare_index_path(project) }
    it { is_expected.to be_allowed_for(:admin) }
    it { is_expected.to be_allowed_for(:owner).of(project) }
    it { is_expected.to be_allowed_for(:maintainer).of(project) }
    it { is_expected.to be_allowed_for(:developer).of(project) }
    it { is_expected.to be_allowed_for(:reporter).of(project) }
    it { is_expected.to be_allowed_for(:guest).of(project) }
    it { is_expected.to be_allowed_for(:user) }
    it { is_expected.to be_denied_for(:external) }
    it { is_expected.to be_denied_for(:visitor) }
  end
  describe "GET /:project_path/-/settings/members" do
    subject { project_settings_members_path(project) }
    it { is_expected.to be_allowed_for(:admin) }
    it { is_expected.to be_allowed_for(:owner).of(project) }
    it { is_expected.to be_allowed_for(:maintainer).of(project) }
    it { is_expected.to be_allowed_for(:developer).of(project) }
    it { is_expected.to be_allowed_for(:reporter).of(project) }
    it { is_expected.to be_allowed_for(:guest).of(project) }
    it { is_expected.to be_allowed_for(:user) }
    it { is_expected.to be_denied_for(:visitor) }
    it { is_expected.to be_denied_for(:external) }
  end
  # Project settings pages are maintainer-and-above only.
  describe "GET /:project_path/-/settings/ci_cd" do
    subject { project_settings_ci_cd_path(project) }
    it { is_expected.to be_allowed_for(:admin) }
    it { is_expected.to be_allowed_for(:owner).of(project) }
    it { is_expected.to be_allowed_for(:maintainer).of(project) }
    it { is_expected.to be_denied_for(:developer).of(project) }
    it { is_expected.to be_denied_for(:reporter).of(project) }
    it { is_expected.to be_denied_for(:guest).of(project) }
    it { is_expected.to be_denied_for(:user) }
    it { is_expected.to be_denied_for(:visitor) }
    it { is_expected.to be_denied_for(:external) }
  end
  describe "GET /:project_path/-/settings/repository" do
    subject { project_settings_repository_path(project) }
    it { is_expected.to be_allowed_for(:admin) }
    it { is_expected.to be_allowed_for(:owner).of(project) }
    it { is_expected.to be_allowed_for(:maintainer).of(project) }
    it { is_expected.to be_denied_for(:developer).of(project) }
    it { is_expected.to be_denied_for(:reporter).of(project) }
    it { is_expected.to be_denied_for(:guest).of(project) }
    it { is_expected.to be_denied_for(:user) }
    it { is_expected.to be_denied_for(:visitor) }
    it { is_expected.to be_denied_for(:external) }
  end
  describe "GET /:project_path/-/blob" do
    let(:commit) { project.repository.commit }
    subject { project_blob_path(project, File.join(commit.id, '.gitignore')) }
    it { is_expected.to be_allowed_for(:admin) }
    it { is_expected.to be_allowed_for(:owner).of(project) }
    it { is_expected.to be_allowed_for(:maintainer).of(project) }
    it { is_expected.to be_allowed_for(:developer).of(project) }
    it { is_expected.to be_allowed_for(:reporter).of(project) }
    it { is_expected.to be_allowed_for(:guest).of(project) }
    it { is_expected.to be_allowed_for(:user) }
    it { is_expected.to be_denied_for(:external) }
    it { is_expected.to be_denied_for(:visitor) }
  end
  describe "GET /:project_path/edit" do
    subject { edit_project_path(project) }
    it { is_expected.to be_allowed_for(:admin) }
    it { is_expected.to be_allowed_for(:owner).of(project) }
    it { is_expected.to be_allowed_for(:maintainer).of(project) }
    it { is_expected.to be_denied_for(:developer).of(project) }
    it { is_expected.to be_denied_for(:reporter).of(project) }
    it { is_expected.to be_denied_for(:guest).of(project) }
    it { is_expected.to be_denied_for(:user) }
    it { is_expected.to be_denied_for(:external) }
    it { is_expected.to be_denied_for(:visitor) }
  end
  describe "GET /:project_path/deploy_keys" do
    subject { project_deploy_keys_path(project) }
    it { is_expected.to be_allowed_for(:admin) }
    it { is_expected.to be_allowed_for(:owner).of(project) }
    it { is_expected.to be_allowed_for(:maintainer).of(project) }
    it { is_expected.to be_denied_for(:developer).of(project) }
    it { is_expected.to be_denied_for(:reporter).of(project) }
    it { is_expected.to be_denied_for(:guest).of(project) }
    it { is_expected.to be_denied_for(:user) }
    it { is_expected.to be_denied_for(:external) }
    it { is_expected.to be_denied_for(:visitor) }
  end
  describe "GET /:project_path/issues" do
    subject { project_issues_path(project) }
    it { is_expected.to be_allowed_for(:admin) }
    it { is_expected.to be_allowed_for(:owner).of(project) }
    it { is_expected.to be_allowed_for(:maintainer).of(project) }
    it { is_expected.to be_allowed_for(:developer).of(project) }
    it { is_expected.to be_allowed_for(:reporter).of(project) }
    it { is_expected.to be_allowed_for(:guest).of(project) }
    it { is_expected.to be_allowed_for(:user) }
    it { is_expected.to be_denied_for(:external) }
    it { is_expected.to be_denied_for(:visitor) }
  end
  describe "GET /:project_path/issues/:id/edit" do
    let(:issue) { create(:issue, project: project) }
    subject { edit_project_issue_path(project, issue) }
    it { is_expected.to be_allowed_for(:admin) }
    it { is_expected.to be_allowed_for(:owner).of(project) }
    it { is_expected.to be_allowed_for(:maintainer).of(project) }
    it { is_expected.to be_allowed_for(:developer).of(project) }
    it { is_expected.to be_allowed_for(:reporter).of(project) }
    it { is_expected.to be_denied_for(:guest).of(project) }
    it { is_expected.to be_denied_for(:user) }
    it { is_expected.to be_denied_for(:external) }
    it { is_expected.to be_denied_for(:visitor) }
  end
  describe "GET /:project_path/snippets" do
    subject { project_snippets_path(project) }
    it { is_expected.to be_allowed_for(:admin) }
    it { is_expected.to be_allowed_for(:owner).of(project) }
    it { is_expected.to be_allowed_for(:maintainer).of(project) }
    it { is_expected.to be_allowed_for(:developer).of(project) }
    it { is_expected.to be_allowed_for(:reporter).of(project) }
    it { is_expected.to be_allowed_for(:guest).of(project) }
    it { is_expected.to be_allowed_for(:user) }
    it { is_expected.to be_denied_for(:external) }
    it { is_expected.to be_denied_for(:visitor) }
  end
  describe "GET /:project_path/snippets/new" do
    subject { new_project_snippet_path(project) }
    it { is_expected.to be_allowed_for(:admin) }
    it { is_expected.to be_allowed_for(:owner).of(project) }
    it { is_expected.to be_allowed_for(:maintainer).of(project) }
    it { is_expected.to be_allowed_for(:developer).of(project) }
    it { is_expected.to be_allowed_for(:reporter).of(project) }
    it { is_expected.to be_denied_for(:guest).of(project) }
    it { is_expected.to be_denied_for(:user) }
    it { is_expected.to be_denied_for(:external) }
    it { is_expected.to be_denied_for(:visitor) }
  end
  describe "GET /:project_path/-/merge_requests" do
    subject { project_merge_requests_path(project) }
    it { is_expected.to be_allowed_for(:admin) }
    it { is_expected.to be_allowed_for(:owner).of(project) }
    it { is_expected.to be_allowed_for(:maintainer).of(project) }
    it { is_expected.to be_allowed_for(:developer).of(project) }
    it { is_expected.to be_allowed_for(:reporter).of(project) }
    it { is_expected.to be_allowed_for(:guest).of(project) }
    it { is_expected.to be_allowed_for(:user) }
    it { is_expected.to be_denied_for(:external) }
    it { is_expected.to be_denied_for(:visitor) }
  end
  describe "GET /:project_path/-/merge_requests/new" do
    subject { project_new_merge_request_path(project) }
    it { is_expected.to be_allowed_for(:admin) }
    it { is_expected.to be_allowed_for(:owner).of(project) }
    it { is_expected.to be_allowed_for(:maintainer).of(project) }
    it { is_expected.to be_allowed_for(:developer).of(project) }
    it { is_expected.to be_denied_for(:reporter).of(project) }
    it { is_expected.to be_denied_for(:guest).of(project) }
    it { is_expected.to be_denied_for(:user) }
    it { is_expected.to be_denied_for(:external) }
    it { is_expected.to be_denied_for(:visitor) }
  end
  describe "GET /:project_path/-/branches" do
    subject { project_branches_path(project) }
    before do
      # Speed increase
      allow_next_instance_of(Project) do |instance|
        allow(instance).to receive(:branches).and_return([])
      end
    end
    it { is_expected.to be_allowed_for(:admin) }
    it { is_expected.to be_allowed_for(:owner).of(project) }
    it { is_expected.to be_allowed_for(:maintainer).of(project) }
    it { is_expected.to be_allowed_for(:developer).of(project) }
    it { is_expected.to be_allowed_for(:reporter).of(project) }
    it { is_expected.to be_allowed_for(:guest).of(project) }
    it { is_expected.to be_allowed_for(:user) }
    it { is_expected.to be_denied_for(:external) }
    it { is_expected.to be_denied_for(:visitor) }
  end
  describe "GET /:project_path/-/tags" do
    subject { project_tags_path(project) }
    before do
      # Speed increase
      allow_next_instance_of(Project) do |instance|
        allow(instance).to receive(:tags).and_return([])
      end
    end
    it { is_expected.to be_allowed_for(:admin) }
    it { is_expected.to be_allowed_for(:owner).of(project) }
    it { is_expected.to be_allowed_for(:maintainer).of(project) }
    it { is_expected.to be_allowed_for(:developer).of(project) }
    it { is_expected.to be_allowed_for(:reporter).of(project) }
    it { is_expected.to be_allowed_for(:guest).of(project) }
    it { is_expected.to be_allowed_for(:user) }
    it { is_expected.to be_denied_for(:external) }
    it { is_expected.to be_denied_for(:visitor) }
  end
  describe "GET /:project_path/-/settings/integrations" do
    subject { project_settings_integrations_path(project) }
    it { is_expected.to be_allowed_for(:admin) }
    it { is_expected.to be_allowed_for(:owner).of(project) }
    it { is_expected.to be_allowed_for(:maintainer).of(project) }
    it { is_expected.to be_denied_for(:developer).of(project) }
    it { is_expected.to be_denied_for(:reporter).of(project) }
    it { is_expected.to be_denied_for(:guest).of(project) }
    it { is_expected.to be_denied_for(:user) }
    it { is_expected.to be_denied_for(:external) }
    it { is_expected.to be_denied_for(:visitor) }
  end
  describe "GET /:project_path/pipelines" do
    subject { project_pipelines_path(project) }
    it { is_expected.to be_allowed_for(:admin) }
    it { is_expected.to be_allowed_for(:owner).of(project) }
    it { is_expected.to be_allowed_for(:maintainer).of(project) }
    it { is_expected.to be_allowed_for(:developer).of(project) }
    it { is_expected.to be_allowed_for(:reporter).of(project) }
    it { is_expected.to be_allowed_for(:guest).of(project) }
    it { is_expected.to be_allowed_for(:user) }
    it { is_expected.to be_denied_for(:external) }
    it { is_expected.to be_denied_for(:visitor) }
  end
  describe "GET /:project_path/pipelines/:id" do
    let(:pipeline) { create(:ci_pipeline, project: project) }
    subject { project_pipeline_path(project, pipeline) }
    it { is_expected.to be_allowed_for(:admin) }
    it { is_expected.to be_allowed_for(:owner).of(project) }
    it { is_expected.to be_allowed_for(:maintainer).of(project) }
    it { is_expected.to be_allowed_for(:developer).of(project) }
    it { is_expected.to be_allowed_for(:reporter).of(project) }
    it { is_expected.to be_allowed_for(:guest).of(project) }
    it { is_expected.to be_allowed_for(:user) }
    it { is_expected.to be_denied_for(:external) }
    it { is_expected.to be_denied_for(:visitor) }
  end
  # Build pages honor the per-project `public_builds` flag: when disabled,
  # access tightens from "any signed-in member/guest" to reporter-and-above.
  describe "GET /:project_path/builds" do
    subject { project_jobs_path(project) }
    context "when allowed for public and internal" do
      before do
        project.update(public_builds: true)
      end
      it { is_expected.to be_allowed_for(:admin) }
      it { is_expected.to be_allowed_for(:owner).of(project) }
      it { is_expected.to be_allowed_for(:maintainer).of(project) }
      it { is_expected.to be_allowed_for(:developer).of(project) }
      it { is_expected.to be_allowed_for(:reporter).of(project) }
      it { is_expected.to be_allowed_for(:guest).of(project) }
      it { is_expected.to be_allowed_for(:user) }
      it { is_expected.to be_denied_for(:external) }
      it { is_expected.to be_denied_for(:visitor) }
    end
    context "when disallowed for public and internal" do
      before do
        project.update(public_builds: false)
      end
      it { is_expected.to be_allowed_for(:admin) }
      it { is_expected.to be_allowed_for(:owner).of(project) }
      it { is_expected.to be_allowed_for(:maintainer).of(project) }
      it { is_expected.to be_allowed_for(:developer).of(project) }
      it { is_expected.to be_allowed_for(:reporter).of(project) }
      it { is_expected.to be_denied_for(:guest).of(project) }
      it { is_expected.to be_denied_for(:user) }
      it { is_expected.to be_denied_for(:external) }
      it { is_expected.to be_denied_for(:visitor) }
    end
  end
  describe "GET /:project_path/builds/:id" do
    let(:pipeline) { create(:ci_pipeline, project: project) }
    let(:build) { create(:ci_build, pipeline: pipeline) }
    subject { project_job_path(project, build.id) }
    context "when allowed for public and internal" do
      before do
        project.update(public_builds: true)
      end
      it { is_expected.to be_allowed_for(:admin) }
      it { is_expected.to be_allowed_for(:owner).of(project) }
      it { is_expected.to be_allowed_for(:maintainer).of(project) }
      it { is_expected.to be_allowed_for(:developer).of(project) }
      it { is_expected.to be_allowed_for(:reporter).of(project) }
      it { is_expected.to be_allowed_for(:guest).of(project) }
      it { is_expected.to be_allowed_for(:user) }
      it { is_expected.to be_denied_for(:external) }
      it { is_expected.to be_denied_for(:visitor) }
    end
    context "when disallowed for public and internal" do
      before do
        project.update(public_builds: false)
      end
      it { is_expected.to be_allowed_for(:admin) }
      it { is_expected.to be_allowed_for(:owner).of(project) }
      it { is_expected.to be_allowed_for(:maintainer).of(project) }
      it { is_expected.to be_allowed_for(:developer).of(project) }
      it { is_expected.to be_allowed_for(:reporter).of(project) }
      it { is_expected.to be_denied_for(:guest).of(project) }
      it { is_expected.to be_denied_for(:user) }
      it { is_expected.to be_denied_for(:external) }
      it { is_expected.to be_denied_for(:visitor) }
    end
  end
  describe 'GET /:project_path/builds/:id/trace' do
    let(:pipeline) { create(:ci_pipeline, project: project) }
    let(:build) { create(:ci_build, pipeline: pipeline) }
    subject { trace_project_job_path(project, build.id) }
    context 'when allowed for public and internal' do
      before do
        project.update(public_builds: true)
      end
      it { is_expected.to be_allowed_for(:admin) }
      it { is_expected.to be_allowed_for(:owner).of(project) }
      it { is_expected.to be_allowed_for(:maintainer).of(project) }
      it { is_expected.to be_allowed_for(:developer).of(project) }
      it { is_expected.to be_allowed_for(:reporter).of(project) }
      it { is_expected.to be_allowed_for(:guest).of(project) }
      it { is_expected.to be_allowed_for(:user) }
      it { is_expected.to be_denied_for(:external) }
      it { is_expected.to be_denied_for(:visitor) }
    end
    context 'when disallowed for public and internal' do
      before do
        project.update(public_builds: false)
      end
      it { is_expected.to be_allowed_for(:admin) }
      it { is_expected.to be_allowed_for(:owner).of(project) }
      it { is_expected.to be_allowed_for(:maintainer).of(project) }
      it { is_expected.to be_allowed_for(:developer).of(project) }
      it { is_expected.to be_allowed_for(:reporter).of(project) }
      it { is_expected.to be_denied_for(:guest).of(project) }
      it { is_expected.to be_denied_for(:user) }
      it { is_expected.to be_denied_for(:external) }
      it { is_expected.to be_denied_for(:visitor) }
    end
  end
  describe "GET /:project_path/pipeline_schedules" do
    subject { project_pipeline_schedules_path(project) }
    it { is_expected.to be_allowed_for(:admin) }
    it { is_expected.to be_allowed_for(:owner).of(project) }
    it { is_expected.to be_allowed_for(:maintainer).of(project) }
    it { is_expected.to be_allowed_for(:developer).of(project) }
    it { is_expected.to be_denied_for(:reporter).of(project) }
    it { is_expected.to be_denied_for(:guest).of(project) }
    it { is_expected.to be_denied_for(:user) }
    it { is_expected.to be_denied_for(:external) }
    it { is_expected.to be_denied_for(:visitor) }
  end
  describe "GET /:project_path/-/environments" do
    subject { project_environments_path(project) }
    it { is_expected.to be_allowed_for(:admin) }
    it { is_expected.to be_allowed_for(:owner).of(project) }
    it { is_expected.to be_allowed_for(:maintainer).of(project) }
    it { is_expected.to be_allowed_for(:developer).of(project) }
    it { is_expected.to be_allowed_for(:reporter).of(project) }
    it { is_expected.to be_denied_for(:guest).of(project) }
    it { is_expected.to be_denied_for(:user) }
    it { is_expected.to be_denied_for(:external) }
    it { is_expected.to be_denied_for(:visitor) }
  end
  describe "GET /:project_path/-/environments/:id" do
    let(:environment) { create(:environment, project: project) }
    subject { project_environment_path(project, environment) }
    it { is_expected.to be_allowed_for(:admin) }
    it { is_expected.to be_allowed_for(:owner).of(project) }
    it { is_expected.to be_allowed_for(:maintainer).of(project) }
    it { is_expected.to be_allowed_for(:developer).of(project) }
    it { is_expected.to be_allowed_for(:reporter).of(project) }
    it { is_expected.to be_denied_for(:guest).of(project) }
    it { is_expected.to be_denied_for(:user) }
    it { is_expected.to be_denied_for(:external) }
    it { is_expected.to be_denied_for(:visitor) }
  end
  describe "GET /:project_path/-/environments/:id/deployments" do
    let(:environment) { create(:environment, project: project) }
    subject { project_environment_deployments_path(project, environment) }
    it { is_expected.to be_allowed_for(:admin) }
    it { is_expected.to be_allowed_for(:owner).of(project) }
    it { is_expected.to be_allowed_for(:maintainer).of(project) }
    it { is_expected.to be_allowed_for(:developer).of(project) }
    it { is_expected.to be_allowed_for(:reporter).of(project) }
    it { is_expected.to be_denied_for(:guest).of(project) }
    it { is_expected.to be_denied_for(:user) }
    it { is_expected.to be_denied_for(:external) }
    it { is_expected.to be_denied_for(:visitor) }
  end
  describe "GET /:project_path/-/environments/new" do
    subject { new_project_environment_path(project) }
    it { is_expected.to be_allowed_for(:admin) }
    it { is_expected.to be_allowed_for(:owner).of(project) }
    it { is_expected.to be_allowed_for(:maintainer).of(project) }
    it { is_expected.to be_allowed_for(:developer).of(project) }
    it { is_expected.to be_denied_for(:reporter).of(project) }
    it { is_expected.to be_denied_for(:guest).of(project) }
    it { is_expected.to be_denied_for(:user) }
    it { is_expected.to be_denied_for(:external) }
    it { is_expected.to be_denied_for(:visitor) }
  end
  # Registry calls are stubbed so no real container registry is needed.
  describe "GET /:project_path/container_registry" do
    let(:container_repository) { create(:container_repository) }
    before do
      stub_container_registry_tags(repository: :any, tags: ['latest'])
      stub_container_registry_config(enabled: true)
      project.container_repositories << container_repository
    end
    subject { project_container_registry_index_path(project) }
    it { is_expected.to be_allowed_for(:admin) }
    it { is_expected.to be_allowed_for(:owner).of(project) }
    it { is_expected.to be_allowed_for(:maintainer).of(project) }
    it { is_expected.to be_allowed_for(:developer).of(project) }
    it { is_expected.to be_allowed_for(:reporter).of(project) }
    it { is_expected.to be_allowed_for(:guest).of(project) }
    it { is_expected.to be_allowed_for(:user) }
    it { is_expected.to be_denied_for(:external) }
    it { is_expected.to be_denied_for(:visitor) }
  end
end
| 42.064748 | 84 | 0.702583 |
38af436d4d48cc432b4db0d2c0c9d0de2df11aab | 120 | default['sls_utils']['yum_install']['epel_release'] = '8'
default['sls_utils']['yum_install']['ius_release'] = '1.0-14'
| 40 | 61 | 0.683333 |
f72082e69676a9294a82e1c0eef87f9801293ef4 | 7,160 | enable :sessions
# NOTE(review): the hard-coded fallback secret is fine for development, but
# production deployments must always set SESSION_SECRET or sessions are
# forgeable by anyone who reads this source.
set :session_secret, (ENV['SESSION_SECRET'] || 'superSecret!1!')
# Create/alter tables to match the DataMapper models at boot.
DataMapper.auto_upgrade!
# Landing page: the latest submissions site-wide, the viewer's own (or
# their team's) submissions when logged in, and the scoreboard.
get '/' do
  # How many recent submissions to show per list. This used to be a
  # constant assigned inside the route, which re-assigned it (with an
  # "already initialized constant" warning) on every single request.
  submission_count = 20
  if session['user_id']
    @logged_in = true
    user = User.get(session['user_id'])
    # If the user is on a team, pull all of the team's submissions;
    # otherwise only their own.
    if user.team
      @user_submissions = Submission.all(
        Submission.user.team.id => user.team.id,
        :order => [ :time.desc ],
        :limit => submission_count)
    else
      @user_submissions = Submission.all(
        Submission.user.id => session['user_id'],
        :order => [ :time.desc ],
        :limit => submission_count)
    end
  end
  # Most recent submissions across all users.
  @submissions = Submission.all(
    :order => [ :time.desc ],
    :limit => submission_count,
  )
  # Scoreboard, best score first.
  @scoreboard = Team.all(:order => [ :score.desc ])
  erb :landing
end
# About page: report the installed version of each supported language
# toolchain by shelling out to its version command.
get '/about' do
  @logged_in = true if session['user_id']
  @languages = [
    { :name => "GCC", :cmd => "gcc --version" },
    { :name => "Java", :cmd => "javac -version 2>&1" },
    { :name => "Python", :cmd => "python -V 2>&1" },
    { :name => "Perl", :cmd => "perl -v" },
    { :name => "Ruby", :cmd => "ruby --version" },
    { :name => "Lua", :cmd => "lua -v" },
    { :name => "Bash", :cmd => "bash --version" }
  ]
  @languages.each do |lang|
    output = %x{ #{lang[:cmd]} }
    lang[:version] =
      if $?.exitstatus != 0
        "Error retrieving version"
      elsif output.empty?
        "#{lang[:cmd]} returned empty version string"
      else
        output
      end
  end
  erb :about
end
# Show the statement for the currently active problem.
get '/problem' do
  @logged_in = true if session['user_id']
  # TODO: pull current problem name from DB
  @problem = Problem.first(:name => "Ones")
  erb :problem
end
# Accept a solution upload for the current problem: store it once for the
# grader and once (timestamped) in a per-user archive, then record a
# Submission. Anonymous uploads are discarded and redirected to login.
post '/problem' do
  # TODO: pull current problem name from DB
  @problem = Problem.first(:name => "Ones")
  if session['user_id']
    @logged_in = true
    user = User.get(session['user_id'])
    now = Time.now
    # Sanitize the client-supplied filename: without File.basename a name
    # such as "../../etc/passwd" would escape the upload directory
    # (classic path-traversal).
    upload_name = File.basename(params['file'][:filename])
    # Create upload directories
    user_dir = File.join($upload_dir, user.id.to_s)
    user_archive_dir = File.join($upload_dir, 'archive', user.id.to_s)
    FileUtils.mkdir_p([user_dir, user_archive_dir])
    # Grader input file
    filename = File.join(user_dir, upload_name)
    File.open(filename, "w") do |f|
      f.write(params['file'][:tempfile].read)
    end
    # Timestamped archive copy
    archive = File.join(user_archive_dir, now.strftime("%F-%T") + '-' + upload_name)
    FileUtils.cp(filename, archive)
    # Create database entry
    s = Submission.new(:time => now,
                       :filename => upload_name,
                       :archive => archive,
                       :problem => @problem,
                       :user => user)
    unless s.save
      @error = "Failed to save Submission: " + s.errors.join(', ')
    end
    erb :problem
  else
    # If the user isn't logged in, throw away their upload and send them
    # to the login page.
    redirect to('/login')
  end
end
# Account settings page; visitors without a valid session (or whose user
# record has vanished) are bounced to the login form.
get '/settings' do
  # TODO
  # - team creation
  # - request to join a team
  # - email change
  # - username change
  # - password change
  if session['user_id']
    @logged_in = true
    @user = User.get(session['user_id'])
    return erb :settings if @user
  end
  redirect to('/login')
end
# Render the login form.
get('/login') { erb :login }
# Authenticate a user: redirect home on success, otherwise re-render the
# login form with an error and the previously entered name.
post '/login' do
  # Echoed back into the form so the user doesn't have to retype the name.
  @login = { :name => params['name'], :pass => params['pass'] }
  if params['name'] && params['pass']
    user = User.first(:name => params['name'])
    # NOTE(review): despite the name, `pw_hash` is compared against the raw
    # password from the form, which looks like a plaintext-password check.
    # Confirm against the User model and hash the comparison if so.
    if user && user.pw_hash == params['pass']
      session['user_id'] = user.id
      redirect to('/')
    else
      @error = "Bad username or password"
    end
  end
  erb :login
end
# Drop the whole session and return to the landing page.
get '/logout' do
  session.clear
  redirect '/'
end
# Render the account-registration form.
get('/signup') { erb :signup }
# Register a new account. Runs model validations plus password presence
# and confirmation checks; on success the user is logged in and sent home.
post '/signup' do
  @user = User.new(:name => params['name'],
                   :email => params['email'])
  error = !@user.valid?
  # Password checks live here (not on the model) because only the hash is
  # persisted. `to_s.empty?` folds the nil and empty-string cases together,
  # replacing the old `! x or x.length == 0` construction.
  if params['password'].to_s.empty?
    error = true
    @user.errors.add(:password, "Password must not be blank")
  elsif params['password_confirm'].to_s.empty?
    error = true
    @user.errors.add(:password_confirm, "Password must not be blank")
  elsif params['password'] != params['password_confirm']
    error = true
    @user.errors.add(:password_confirm, "Passwords must match")
  end
  if error
    @error = "Please correct the following problems"
    erb :signup
  else
    @user.pw_hash = params['password']
    if @user.save
      session['user_id'] = @user.id
      redirect to('/')
    else
      @error = "Error creating user"
      @user.errors.each do |err|
        @error += "; "
        @error += err.join("; ")
      end
      erb :signup
    end
  end
end
require 'cgi'

# Minimal HTML form builder used by the `form_for` view helper. Values and
# validation errors read from the wrapped object are HTML-escaped before
# being interpolated into the markup; previously they were emitted raw,
# which allowed reflected/stored XSS via user-supplied field values.
class Form
  # obj - the record backing the form (read with obj[field]); may be nil
  #       for an empty form. Validation messages are read from
  #       obj.errors[field] when the object exposes #errors.
  def initialize(obj)
    @obj = obj
  end

  # Render a labelled <input> row for +field+.
  # options: :type (default "text"), :label (default "").
  def input(field, options={})
    options = { :type => "text", :label => "" }.merge(options)
    type = options[:type]
    label = options[:label]
    val = ""
    if @obj
      # Escape the stored value: it may originate from user input.
      val = CGI.escapeHTML(@obj[field].to_s)
    end
    html = <<EOF
<div class="input_row">
  <label for="#{field}">#{label}</label>
  <input type="#{type}" name="#{field}" value="#{val}"/>
EOF
    if @obj and @obj.respond_to?(:errors) and @obj.errors[field] and
       @obj.errors[field].length > 0
      # Error messages can echo user input, so escape them too.
      err = CGI.escapeHTML(@obj.errors[field].join("; "))
      html += <<EOF
  <div class="error">#{err}</div>
EOF
    end
    html += "</div>"
    html
  end

  # Render a submit-button row. options: :label (default "Submit").
  def submit(options={})
    opts = { :label => "Submit" }.merge(options)
    <<EOF
  <div class="input_row">
    <input type="submit" value="#{opts[:label]}" class="btn btn-large btn-primary"/>
  </div>
EOF
  end
end
# View helper: yields a Form wrapped around +obj+ so templates can write
#   form_for(@user) { |f| f.input :name }
helpers do
  def form_for(obj, &block)
    block.call(Form.new(obj))
  end
end
| 27.328244 | 130 | 0.571648 |
388bee023d9054811cbdfa2f9dd2f7dc5ff39ae3 | 1,249 | require_relative 'boot'
require 'rails/all'
# Require the gems listed in Gemfile, including any gems
# you've limited to :test, :development, or :production.
Bundler.require(*Rails.groups)
# Application-wide Rails configuration for the speech-to-text demo.
module GoogleSpeechToTextDemo
  class Application < Rails::Application
    # Initialize configuration defaults for originally generated Rails version.
    config.load_defaults 5.2
    # Set Default Encoding
    config.encoding = 'utf-8'
    # Enable the asset pipeline
    config.assets.enabled = true
    # config.assets.paths += %w(/app/assets /vendor/assets, /public/assets)
    # config.assets.precompile += %w(recorder.js speech.js chatbox.css speech.css)
    config.assets.digest = true
    # Settings in config/environments/* take precedence over those specified here.
    # Application configuration can go into files in config/initializers
    # -- all .rb files in that directory are automatically loaded after loading
    # the framework and any gems in your application.
    # For Heroku deployments.
    config.assets.initialize_on_precompile = true
    # NOTE(review): CORS is wide open (any origin, any header) for GET/POST.
    # Acceptable for a demo, but tighten `origins` before production use.
    config.middleware.insert_before 0, Rack::Cors do
      allow do
        origins '*'
        resource '*', headers: :any, methods: [:get, :post, :options]
      end
    end
  end
end
| 29.738095 | 82 | 0.713371 |
87acfd267267d08720c031b4778e28014b1a1e4c | 717 | class CreatePapers < ActiveRecord::Migration[5.2]
def up
create_table :papers do |t|
t.string :paper_id
t.string :title
t.text :abstract
t.tsvector :search_vector
t.timestamps
end
execute "CREATE INDEX abstracts_search_idx ON papers USING gin(search_vector);"
execute "DROP TRIGGER IF EXISTS papers_search_vector_update ON papers;"
execute "CREATE TRIGGER papers_search_vector_update BEFORE INSERT OR UPDATE ON papers FOR EACH ROW EXECUTE PROCEDURE tsvector_update_trigger(search_vector, 'pg_catalog.english', title, abstract);"
end
def down
drop_table :papers
execute "DROP TRIGGER IF EXISTS papers_search_vector_update on papers;"
end
end
| 34.142857 | 200 | 0.739191 |
bf3b0aa13eb34ead1b7c439afcf97f3418f9bacb | 1,408 | # coding: utf-8
lib = File.expand_path('../lib', __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require 'etfc/version'
# Gem metadata for etfc, a CLI that builds collages from flickr images
# matching user-provided keywords.
Gem::Specification.new do |spec|
  spec.name          = 'etfc'
  spec.version       = ETFC::VERSION
  spec.authors       = ['Maximilian Haack']
  spec.email         = ['[email protected]']
  spec.summary       = 'A flickr collage maker'
  spec.description   = 'ETFC is a tool to create collages from flickr ' \
                       'images based upon user provided keywords.'
  spec.homepage      = 'https://www.github.com/coffeejunk/etfc'
  spec.license       = 'MIT'
  # Ship everything tracked by git except the test suites.
  spec.files = `git ls-files -z`.split("\x0").reject do |f|
    f.match(%r{^(test|spec|features)/})
  end
  spec.bindir        = 'exe'
  spec.executables   = spec.files.grep(%r{^exe/}) { |f| File.basename(f) }
  spec.require_paths = ['lib']
  spec.add_runtime_dependency 'flickraw', '~> 0.9.9'
  spec.add_runtime_dependency 'rmagick', '~> 4.1.0.rc2'
  spec.add_runtime_dependency 'thor', '~> 0.19.4'
  spec.add_development_dependency 'bundler', '~> 2.0'
  spec.add_development_dependency 'rake', '~> 10.0'
  spec.add_development_dependency 'rspec', '~> 3.0'
  spec.add_development_dependency 'rubocop', '~> 0.46.0'
  spec.add_development_dependency 'simplecov', '~> 0.12.0'
  spec.add_development_dependency 'webmock', '~> 2.3.1'
  spec.add_development_dependency 'vcr', '~> 3.0.3'
end
| 38.054054 | 74 | 0.647017 |
e2aaa5dc3d4312079b0735131e65e81a7cf0acce | 370 | class User < ActiveRecord::Base
  include Adauth::Rails::ModelBridge
  # Maps Adauth (Active Directory) attributes onto local model fields.
  AdauthMappings = {
    :login => :login,
    :group_strings => :cn_groups
  }
  # NOTE(review): both elements are :login — the second entry looks like it
  # was meant to be a different search field; confirm against the Adauth
  # ModelBridge documentation.
  AdauthSearchField = [ :login, :login ]
  #validates :first_name, :last_name, :login, :email, :active, presence: true
  validates :first_name, :last_name, :login, :email, presence: true
  has_many :reservations, inverse_of: :user
end
| 23.125 | 75 | 0.72973 |
e2715f2458df71243ca728143ddd793300b31683 | 9,900 | =begin
#SendinBlue API
#SendinBlue provide a RESTFul API that can be used with any languages. With this API, you will be able to : - Manage your campaigns and get the statistics - Manage your contacts - Send transactional Emails and SMS - and much more... You can download our wrappers at https://github.com/orgs/sendinblue **Possible responses** | Code | Message | | :-------------: | ------------- | | 200 | OK. Successful Request | | 201 | OK. Successful Creation | | 202 | OK. Request accepted | | 204 | OK. Successful Update/Deletion | | 400 | Error. Bad Request | | 401 | Error. Authentication Needed | | 402 | Error. Not enough credit, plan upgrade needed | | 403 | Error. Permission denied | | 404 | Error. Object does not exist | | 405 | Error. Method not allowed | | 406 | Error. Not Acceptable |
OpenAPI spec version: 3.0.0
Contact: [email protected]
Generated by: https://github.com/swagger-api/swagger-codegen.git
Swagger Codegen version: 2.4.18
=end
require 'date'
module SibApiV3Sdk
  # Request payload for sending a transactional SMS through the SendinBlue
  # API. Auto-generated Swagger model: holds the message attributes plus the
  # (de)serialization helpers shared by all SDK models.
  class SendTransacSms
    # Name of the sender. **The number of characters is limited to 11 for alphanumeric characters and 15 for numeric characters**
    attr_accessor :sender

    # Mobile number to send SMS with the country code
    attr_accessor :recipient

    # Content of the message. If more than 160 characters long, will be sent as multiple text messages
    attr_accessor :content

    # Type of the SMS. Marketing SMS messages are those sent typically with marketing content. Transactional SMS messages are sent to individuals and are triggered in response to some action, such as a sign-up, purchase, etc.
    attr_accessor :type

    # Tag of the message
    attr_accessor :tag

    # Webhook to call for each event triggered by the message (delivered etc.)
    attr_accessor :web_url

    # Validates that a value belongs to a fixed set of allowed values.
    # Used for enum-typed attributes such as +type+.
    class EnumAttributeValidator
      attr_reader :datatype
      attr_reader :allowable_values

      def initialize(datatype, allowable_values)
        # Coerce the allowed values to the declared datatype up front so
        # that +valid?+ can compare with plain equality.
        @allowable_values = allowable_values.map do |value|
          case datatype.to_s
          when /Integer/i
            value.to_i
          when /Float/i
            value.to_f
          else
            value
          end
        end
      end

      # A nil value is considered valid (the attribute is simply unset).
      def valid?(value)
        !value || allowable_values.include?(value)
      end
    end

    # Attribute mapping from ruby-style variable name to JSON key.
    def self.attribute_map
      {
        :'sender' => :'sender',
        :'recipient' => :'recipient',
        :'content' => :'content',
        :'type' => :'type',
        :'tag' => :'tag',
        :'web_url' => :'webUrl'
      }
    end

    # Attribute type mapping.
    def self.swagger_types
      {
        :'sender' => :'String',
        :'recipient' => :'String',
        :'content' => :'String',
        :'type' => :'String',
        :'tag' => :'String',
        :'web_url' => :'String'
      }
    end

    # Initializes the object
    # @param [Hash] attributes Model attributes in the form of hash
    def initialize(attributes = {})
      return unless attributes.is_a?(Hash)

      # convert string to symbol for hash key
      attributes = attributes.each_with_object({}) { |(k, v), h| h[k.to_sym] = v }

      if attributes.has_key?(:'sender')
        self.sender = attributes[:'sender']
      end

      if attributes.has_key?(:'recipient')
        self.recipient = attributes[:'recipient']
      end

      if attributes.has_key?(:'content')
        self.content = attributes[:'content']
      end

      if attributes.has_key?(:'type')
        self.type = attributes[:'type']
      else
        # API default: SMS are transactional unless stated otherwise.
        self.type = 'transactional'
      end

      if attributes.has_key?(:'tag')
        self.tag = attributes[:'tag']
      end

      if attributes.has_key?(:'webUrl')
        self.web_url = attributes[:'webUrl']
      end
    end

    # Show invalid properties with the reasons. Usually used together with valid?
    # @return Array for valid properties with the reasons
    def list_invalid_properties
      invalid_properties = Array.new
      if @sender.nil?
        invalid_properties.push('invalid value for "sender", sender cannot be nil.')
      end

      if @sender.to_s.length > 15
        invalid_properties.push('invalid value for "sender", the character length must be smaller than or equal to 15.')
      end

      if @recipient.nil?
        invalid_properties.push('invalid value for "recipient", recipient cannot be nil.')
      end

      if @content.nil?
        invalid_properties.push('invalid value for "content", content cannot be nil.')
      end

      invalid_properties
    end

    # Check to see if the all the properties in the model are valid
    # @return true if the model is valid
    def valid?
      return false if @sender.nil?
      return false if @sender.to_s.length > 15
      return false if @recipient.nil?
      return false if @content.nil?
      type_validator = EnumAttributeValidator.new('String', ['transactional', 'marketing'])
      return false unless type_validator.valid?(@type)
      true
    end

    # Custom attribute writer method with validation
    # @param [Object] sender Value to be assigned
    def sender=(sender)
      if sender.nil?
        fail ArgumentError, 'sender cannot be nil'
      end

      if sender.to_s.length > 15
        fail ArgumentError, 'invalid value for "sender", the character length must be smaller than or equal to 15.'
      end

      @sender = sender
    end

    # Custom attribute writer method checking allowed values (enum).
    # @param [Object] type Object to be assigned
    def type=(type)
      validator = EnumAttributeValidator.new('String', ['transactional', 'marketing'])
      unless validator.valid?(type)
        # BUGFIX: must be double-quoted so the allowed values are actually
        # interpolated into the error message (single quotes left the
        # literal "#{validator.allowable_values}" in the output).
        fail ArgumentError, "invalid value for \"type\", must be one of #{validator.allowable_values}."
      end
      @type = type
    end

    # Checks equality by comparing each attribute.
    # @param [Object] Object to be compared
    def ==(o)
      return true if self.equal?(o)
      self.class == o.class &&
          sender == o.sender &&
          recipient == o.recipient &&
          content == o.content &&
          type == o.type &&
          tag == o.tag &&
          web_url == o.web_url
    end

    # @see the `==` method
    # @param [Object] Object to be compared
    def eql?(o)
      self == o
    end

    # Calculates hash code according to all attributes.
    # @return [Fixnum] Hash code
    def hash
      [sender, recipient, content, type, tag, web_url].hash
    end

    # Builds the object from hash
    # @param [Hash] attributes Model attributes in the form of hash
    # @return [Object] Returns the model itself
    def build_from_hash(attributes)
      return nil unless attributes.is_a?(Hash)
      self.class.swagger_types.each_pair do |key, type|
        if type =~ /\AArray<(.*)>/i
          # check to ensure the input is an array given that the attribute
          # is documented as an array but the input is not
          if attributes[self.class.attribute_map[key]].is_a?(Array)
            self.send("#{key}=", attributes[self.class.attribute_map[key]].map { |v| _deserialize($1, v) })
          end
        elsif !attributes[self.class.attribute_map[key]].nil?
          self.send("#{key}=", _deserialize(type, attributes[self.class.attribute_map[key]]))
        end # or else data not found in attributes(hash), not an issue as the data can be optional
      end

      self
    end

    # Deserializes the data based on type
    # @param string type Data type
    # @param string value Value to be deserialized
    # @return [Object] Deserialized data
    def _deserialize(type, value)
      case type.to_sym
      when :DateTime
        DateTime.parse(value)
      when :Date
        Date.parse(value)
      when :String
        value.to_s
      when :Integer
        value.to_i
      when :Float
        value.to_f
      when :BOOLEAN
        if value.to_s =~ /\A(true|t|yes|y|1)\z/i
          true
        else
          false
        end
      when :Object
        # generic object (usually a Hash), return directly
        value
      when /\AArray<(?<inner_type>.+)>\z/
        inner_type = Regexp.last_match[:inner_type]
        value.map { |v| _deserialize(inner_type, v) }
      when /\AHash<(?<k_type>.+?), (?<v_type>.+)>\z/
        k_type = Regexp.last_match[:k_type]
        v_type = Regexp.last_match[:v_type]
        {}.tap do |hash|
          value.each do |k, v|
            hash[_deserialize(k_type, k)] = _deserialize(v_type, v)
          end
        end
      else # model
        temp_model = SibApiV3Sdk.const_get(type).new
        temp_model.build_from_hash(value)
      end
    end

    # Returns the string representation of the object
    # @return [String] String presentation of the object
    def to_s
      to_hash.to_s
    end

    # to_body is an alias to to_hash (backward compatibility)
    # @return [Hash] Returns the object in the form of hash
    def to_body
      to_hash
    end

    # Returns the object in the form of hash
    # @return [Hash] Returns the object in the form of hash
    def to_hash
      hash = {}
      self.class.attribute_map.each_pair do |attr, param|
        value = self.send(attr)
        next if value.nil?
        hash[param] = _to_hash(value)
      end
      hash
    end

    # Outputs non-array value in the form of hash
    # For object, use to_hash. Otherwise, just return the value
    # @param [Object] value Any valid value
    # @return [Hash] Returns the value in the form of hash
    def _to_hash(value)
      if value.is_a?(Array)
        value.compact.map { |v| _to_hash(v) }
      elsif value.is_a?(Hash)
        {}.tap do |hash|
          value.each { |k, v| hash[k] = _to_hash(v) }
        end
      elsif value.respond_to? :to_hash
        value.to_hash
      else
        value
      end
    end
  end
end
| 32.352941 | 839 | 0.618586 |
module Rack
  # Namespace that holds the gem's version constant.
  module AcceptDefaultVersion
    VERSION = "0.0.1"
  end
end
| 12.333333 | 29 | 0.702703 |
f86209e3c79b390eec0d7e55278b9a6f39335e50 | 5,048 | # encoding: utf-8
require_relative './relocator'
module CartoDB
  module Relocator
    # Background worker that moves a CartoDB user's database, either to a
    # different host (+relocate+) or into an organization owner's shared
    # database with a per-user schema (+organize+).
    class Worker
      # Resque-style entry point: looks up the relocation job by id and
      # dispatches on its type ("dump" or "load").
      def self.perform(args = {})
        puts args
        job = CartoDB::Relocator::Job.find(args["id"])
        dump(job) if job.type == "dump"
        load(job) if job.type == "load"
      end # perform

      # Moves +user+ into organization +org+: first migrates the user from
      # the shared 'public' schema into a schema named after the user, then
      # relocates the user's data into the organization owner's database.
      # Rolls back (after a 5 second grace period) on any failure.
      def self.organize(user,org)
        port = ::Rails::Sequel.configuration.environment_for(Rails.env)['port']
        relocator = CartoDB::Relocator::Relocation.new(
          source: {conn: {host: user.database_host, port: port,
                          dbname: user.database_name,
                          user: 'postgres'}, schema: user.username}, #we will move 'public' to this schema
          target: {conn: {host: org.owner.database_host, port: port,
                          dbname: org.owner.database_name, user: 'postgres'}, schema: user.username},
          redis: {host: Cartodb.config[:redis]['host'], port: Cartodb.config[:redis]['port']},
          dbname: user.database_name, username: user.database_username, :mode => :organize,
          user_object: user
        )
        begin
          # --------------- we first move the user to its own schema
          case user.database_schema
          when 'public'
            #associate it to the organization now so it lets create the public_user
            #on the schema.
            user.organization = org
            user.database_schema = user.username
            user.in_database(as: :superuser) do |database|
              database['ALTER SCHEMA public RENAME TO '+user.username].all
              # An apple a day keeps PostGIS away
              database['CREATE SCHEMA public; ALTER EXTENSION postgis SET SCHEMA public'].all
            end
            user.set_database_search_path
            user.create_public_db_user
            # Detach from the organization again: the final association is
            # only made once the relocation below starts.
            user.organization = nil
            user.save_metadata
            user.setup_schema
            User.terminate_database_connections(user.database_name, user.database_host)
            user.save
            puts "Migrated to schema-powered successfully!"
          when user.username
            puts "User is already on its own, non-public schema."
          else
            raise "User is on a different schema than expected."
          end

          # --------------- then move the user to its new place
          user.database_host = org.owner.database_host
          user.database_schema = user.username
          user.database_name = org.owner.database_name
          user.organization = org
          # Run user creation in a separate thread so it uses a fresh
          # connection against the new target database.
          Thread.new do
            begin
              user.create_db_user
            rescue => e
              puts "Error #{e} while creating user. Ignoring as it probably already existed"
            end
            user.grant_user_in_database
          end.join
          begin
            user.create_public_db_user
          rescue => e
            puts "Error #{e} while creating public user. Ignoring as it probably already existed"
          end
          user.monitor_user_notification
          user.create_user_schema
          user.set_database_search_path
          #User.terminate_database_connections(user.database_name, user.database_host)
          relocator.migrate
          user.setup_schema
          #wipe all OIDs
          user.tables.update({:table_id =>nil})
          relocator.compare
          relocator.finalize
          relocator.check_org_user(user)
        rescue => e
          puts "Error: #{e}, #{e.backtrace}"
          puts "Rolling back in 5 secs"
          sleep 5
          relocator.rollback
          return
        end
        user.save
        user.create_in_central
        user.update_in_central
      end # organize

      # Moves +user+'s database verbatim to +new_database_host+ (and
      # optionally a different port). Rolls back on any failure after a
      # 5 second grace period.
      def self.relocate(user, new_database_host, new_database_port=nil)
        port = ::Rails::Sequel.configuration.environment_for(Rails.env)['port']
        new_database_port ||= port
        relocator = CartoDB::Relocator::Relocation.new(
          source: {conn: {host: user.database_host, port: port,
                          dbname: user.database_name,
                          user: 'postgres'}},
          target: {conn: {host: new_database_host, port: new_database_port,
                          dbname: user.database_name, user: 'postgres'}},
          redis: {host: Cartodb.config[:redis]['host'], port: Cartodb.config[:redis]['port']},
          dbname: user.database_name, username: user.database_username,
          user_object: user, mode: :relocate
        )
        begin
          user.database_host = new_database_host
          relocator.setup
          user.monitor_user_notification
          relocator.migrate
          user.set_statement_timeouts
          relocator.compare
          puts user.save #this will terminate all connections
          relocator.finalize
        rescue => e
          puts "Error: #{e}, #{e.backtrace}"
          puts "Rolling back (changing back database_host and dropping triggers) in 5 secs"
          sleep 5
          relocator.rollback
        end
      end # relocate
    end
  end
end
| 39.4375 | 106 | 0.587361 |
ed4ebf5b5bc1bb45656570ccc27ff11ecf8df097 | 586 | class User
include Dynamoid::Document
field :name
field :email
field :password
field :admin, :boolean
field :last_logged_in_at, :datetime
field :favorite_colors, :serialized
field :todo_list, :array
has_and_belongs_to_many :subscriptions
has_many :books, class_name: 'Magazine', inverse_of: :owner
has_one :monthly, class_name: 'Subscription', inverse_of: :customer
has_and_belongs_to_many :followers, class_name: 'User', inverse_of: :following
has_and_belongs_to_many :following, class_name: 'User', inverse_of: :followers
belongs_to :camel_case
end
| 24.416667 | 80 | 0.766212 |
bf5ee4369a613723c6c4ec2a177b5a69e5022a86 | 256 | Given(/^a story "(.*?)"$/) do |title|
account(name: "Story Account") do |account|
@story = sample_entry(account: account, title: title)
end
end
When(/^I publish the story$/) do
click_on "Publish"
choose "indefinitely"
click_on "Publish"
end
| 21.333333 | 57 | 0.664063 |
62ef93f8c5019fdc34bfc7b920f5a0f76aa10623 | 8,947 | # Copyright (C) The Arvados Authors. All rights reserved.
#
# SPDX-License-Identifier: AGPL-3.0
# Data migration that switches collections from being addressed by their
# manifest hash (locator) to regular Arvados uuids: the hash moves into the
# new portable_data_hash column, collection records are synthesized from the
# old name/permission links, provenance links become jobs, and the obsolete
# links are deleted. Irreversible.
class CollectionUseRegularUuids < ActiveRecord::Migration[4.2]
  def up
    add_column :collections, :name, :string
    add_column :collections, :description, :string
    add_column :collections, :properties, :text
    add_column :collections, :expires_at, :date
    remove_column :collections, :locator

    say_with_time "Step 1. Move manifest hashes into portable_data_hash field" do
      ActiveRecord::Base.connection.execute("update collections set portable_data_hash=uuid, uuid=null")
    end

    say_with_time "Step 2. Create new collection objects from the name links in the table." do
      from_clause = %{
from links inner join collections on head_uuid=collections.portable_data_hash
where link_class='name' and collections.uuid is null
}
      links = ActiveRecord::Base.connection.select_all %{
select links.uuid, head_uuid, tail_uuid, links.name,
manifest_text, links.created_at, links.modified_at, links.modified_by_client_uuid, links.modified_by_user_uuid
#{from_clause}
}
      links.each do |d|
        ActiveRecord::Base.connection.execute %{
insert into collections (uuid, portable_data_hash, owner_uuid, name, manifest_text, created_at, modified_at, modified_by_client_uuid, modified_by_user_uuid, updated_at)
values (#{ActiveRecord::Base.connection.quote Collection.generate_uuid},
#{ActiveRecord::Base.connection.quote d['head_uuid']},
#{ActiveRecord::Base.connection.quote d['tail_uuid']},
#{ActiveRecord::Base.connection.quote d['name']},
#{ActiveRecord::Base.connection.quote d['manifest_text']},
#{ActiveRecord::Base.connection.quote d['created_at']},
#{ActiveRecord::Base.connection.quote d['modified_at']},
#{ActiveRecord::Base.connection.quote d['modified_by_client_uuid']},
#{ActiveRecord::Base.connection.quote d['modified_by_user_uuid']},
#{ActiveRecord::Base.connection.quote d['modified_at']})
}
      end
      ActiveRecord::Base.connection.execute "delete from links where links.uuid in (select links.uuid #{from_clause})"
    end

    say_with_time "Step 3. Create new collection objects from the can_read links in the table." do
      from_clause = %{
from links inner join collections on head_uuid=collections.portable_data_hash
where link_class='permission' and links.name='can_read' and collections.uuid is null
}
      # NOTE(review): this select does not fetch modified_by_client_uuid /
      # modified_by_user_uuid, yet the insert below references them, so
      # those columns end up NULL for rows created here — confirm intended.
      links = ActiveRecord::Base.connection.select_all %{
select links.uuid, head_uuid, tail_uuid, manifest_text, links.created_at, links.modified_at
#{from_clause}
}
      links.each do |d|
        ActiveRecord::Base.connection.execute %{
insert into collections (uuid, portable_data_hash, owner_uuid, manifest_text, created_at, modified_at, modified_by_client_uuid, modified_by_user_uuid, updated_at)
values (#{ActiveRecord::Base.connection.quote Collection.generate_uuid},
#{ActiveRecord::Base.connection.quote d['head_uuid']},
#{ActiveRecord::Base.connection.quote d['tail_uuid']},
#{ActiveRecord::Base.connection.quote d['manifest_text']},
#{ActiveRecord::Base.connection.quote d['created_at']},
#{ActiveRecord::Base.connection.quote d['modified_at']},
#{ActiveRecord::Base.connection.quote d['modified_by_client_uuid']},
#{ActiveRecord::Base.connection.quote d['modified_by_user_uuid']},
#{ActiveRecord::Base.connection.quote d['modified_at']})
}
      end
      ActiveRecord::Base.connection.execute "delete from links where links.uuid in (select links.uuid #{from_clause})"
    end

    say_with_time "Step 4. Migrate remaining orphan collection objects" do
      links = ActiveRecord::Base.connection.select_all %{
select portable_data_hash, owner_uuid, manifest_text, created_at, modified_at
from collections
where uuid is null and portable_data_hash not in (select portable_data_hash from collections where uuid is not null)
}
      links.each do |d|
        ActiveRecord::Base.connection.execute %{
insert into collections (uuid, portable_data_hash, owner_uuid, manifest_text, created_at, modified_at, modified_by_client_uuid, modified_by_user_uuid, updated_at)
values (#{ActiveRecord::Base.connection.quote Collection.generate_uuid},
#{ActiveRecord::Base.connection.quote d['portable_data_hash']},
#{ActiveRecord::Base.connection.quote d['owner_uuid']},
#{ActiveRecord::Base.connection.quote d['manifest_text']},
#{ActiveRecord::Base.connection.quote d['created_at']},
#{ActiveRecord::Base.connection.quote d['modified_at']},
#{ActiveRecord::Base.connection.quote d['modified_by_client_uuid']},
#{ActiveRecord::Base.connection.quote d['modified_by_user_uuid']},
#{ActiveRecord::Base.connection.quote d['modified_at']})
}
      end
    end

    say_with_time "Step 5. Delete old collection objects." do
      ActiveRecord::Base.connection.execute("delete from collections where uuid is null")
    end

    say_with_time "Step 6. Delete permission links where tail_uuid is a collection (invalid records)" do
      ActiveRecord::Base.connection.execute %{
delete from links where links.uuid in (select links.uuid
from links
where tail_uuid like '________________________________+%' and link_class='permission' )
}
    end

    say_with_time "Step 7. Migrate collection -> collection provenance links to jobs" do
      from_clause = %{
from links
where head_uuid like '________________________________+%' and tail_uuid like '________________________________+%' and links.link_class = 'provenance'
}
      links = ActiveRecord::Base.connection.select_all %{
select links.uuid, head_uuid, tail_uuid, links.created_at, links.modified_at, links.modified_by_client_uuid, links.modified_by_user_uuid, links.owner_uuid
#{from_clause}
}
      links.each do |d|
        # Each provenance link becomes a finished job whose input is the
        # tail collection and whose output is the head collection.
        newuuid = Job.generate_uuid
        ActiveRecord::Base.connection.execute %{
insert into jobs (uuid, script_parameters, output, running, success, created_at, modified_at, modified_by_client_uuid, modified_by_user_uuid, owner_uuid, updated_at)
values (#{ActiveRecord::Base.connection.quote newuuid},
#{ActiveRecord::Base.connection.quote "---\ninput: "+d['tail_uuid']},
#{ActiveRecord::Base.connection.quote d['head_uuid']},
#{ActiveRecord::Base.connection.quote false},
#{ActiveRecord::Base.connection.quote true},
#{ActiveRecord::Base.connection.quote d['created_at']},
#{ActiveRecord::Base.connection.quote d['modified_at']},
#{ActiveRecord::Base.connection.quote d['modified_by_client_uuid']},
#{ActiveRecord::Base.connection.quote d['modified_by_user_uuid']},
#{ActiveRecord::Base.connection.quote d['owner_uuid']},
#{ActiveRecord::Base.connection.quote d['modified_at']})
}
      end
      ActiveRecord::Base.connection.execute "delete from links where links.uuid in (select links.uuid #{from_clause})"
    end

    say_with_time "Step 8. Migrate remaining links with head_uuid pointing to collections" do
      from_clause = %{
from links inner join collections on links.head_uuid=portable_data_hash
where collections.uuid is not null
}
      links = ActiveRecord::Base.connection.select_all %{
select links.uuid, collections.uuid as collectionuuid, tail_uuid, link_class, links.properties,
links.name, links.created_at, links.modified_at, links.modified_by_client_uuid, links.modified_by_user_uuid, links.owner_uuid
#{from_clause}
}
      links.each do |d|
        # Re-point each link at the collection's new uuid instead of its
        # portable data hash.
        ActiveRecord::Base.connection.execute %{
insert into links (uuid, head_uuid, tail_uuid, link_class, name, properties, created_at, modified_at, modified_by_client_uuid, modified_by_user_uuid, owner_uuid, updated_at)
values (#{ActiveRecord::Base.connection.quote Link.generate_uuid},
#{ActiveRecord::Base.connection.quote d['collectionuuid']},
#{ActiveRecord::Base.connection.quote d['tail_uuid']},
#{ActiveRecord::Base.connection.quote d['link_class']},
#{ActiveRecord::Base.connection.quote d['name']},
#{ActiveRecord::Base.connection.quote d['properties']},
#{ActiveRecord::Base.connection.quote d['created_at']},
#{ActiveRecord::Base.connection.quote d['modified_at']},
#{ActiveRecord::Base.connection.quote d['modified_by_client_uuid']},
#{ActiveRecord::Base.connection.quote d['modified_by_user_uuid']},
#{ActiveRecord::Base.connection.quote d['owner_uuid']},
#{ActiveRecord::Base.connection.quote d['modified_at']})
}
      end
      ActiveRecord::Base.connection.execute "delete from links where links.uuid in (select links.uuid #{from_clause})"
    end

    say_with_time "Step 9. Delete any remaining name links" do
      ActiveRecord::Base.connection.execute("delete from links where link_class='name'")
    end

    say_with_time "Step 10. Validate links table" do
      # Sanity check: no link may still reference a hash-style collection id.
      links = ActiveRecord::Base.connection.select_all %{
select links.uuid, head_uuid, tail_uuid, link_class, name
from links
where head_uuid like '________________________________+%' or tail_uuid like '________________________________+%'
}
      links.each do |d|
        raise "Bad row #{d}"
      end
    end
  end

  def down
    raise ActiveRecord::IrreversibleMigration, "Can't downmigrate changes to collections and links without potentially losing data."
  end
end
| 48.362162 | 173 | 0.765061 |
8770ebb6fe7ebd2ec9f5f1506826bdcb08371d06 | 238 | # frozen_string_literal: true
require 'solidus_core'
require 'solidus_support'
require 'deface'
require 'spree_multi_domain/engine'
require 'spree_multi_domain/create_line_item_support'
require 'spree_multi_domain/show_product_support'
| 23.8 | 53 | 0.857143 |
e95242e8aff83db55584cd1d4086109ebb6ea33b | 3,630 | # frozen_string_literal: true
# Copyright 2022 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Auto-generated by gapic-generator-ruby. DO NOT EDIT!
module Google
  module Protobuf
    # Auto-generated representations of the protobuf "well-known" wrapper
    # types (google/protobuf/wrappers.proto): each wraps a single scalar
    # +value+ so it can be distinguished from an unset field.

    # Wrapper message for `double`.
    #
    # The JSON representation for `DoubleValue` is JSON number.
    # @!attribute [rw] value
    #   @return [::Float]
    #     The double value.
    class DoubleValue
      include ::Google::Protobuf::MessageExts
      extend ::Google::Protobuf::MessageExts::ClassMethods
    end

    # Wrapper message for `float`.
    #
    # The JSON representation for `FloatValue` is JSON number.
    # @!attribute [rw] value
    #   @return [::Float]
    #     The float value.
    class FloatValue
      include ::Google::Protobuf::MessageExts
      extend ::Google::Protobuf::MessageExts::ClassMethods
    end

    # Wrapper message for `int64`.
    #
    # The JSON representation for `Int64Value` is JSON string.
    # @!attribute [rw] value
    #   @return [::Integer]
    #     The int64 value.
    class Int64Value
      include ::Google::Protobuf::MessageExts
      extend ::Google::Protobuf::MessageExts::ClassMethods
    end

    # Wrapper message for `uint64`.
    #
    # The JSON representation for `UInt64Value` is JSON string.
    # @!attribute [rw] value
    #   @return [::Integer]
    #     The uint64 value.
    class UInt64Value
      include ::Google::Protobuf::MessageExts
      extend ::Google::Protobuf::MessageExts::ClassMethods
    end

    # Wrapper message for `int32`.
    #
    # The JSON representation for `Int32Value` is JSON number.
    # @!attribute [rw] value
    #   @return [::Integer]
    #     The int32 value.
    class Int32Value
      include ::Google::Protobuf::MessageExts
      extend ::Google::Protobuf::MessageExts::ClassMethods
    end

    # Wrapper message for `uint32`.
    #
    # The JSON representation for `UInt32Value` is JSON number.
    # @!attribute [rw] value
    #   @return [::Integer]
    #     The uint32 value.
    class UInt32Value
      include ::Google::Protobuf::MessageExts
      extend ::Google::Protobuf::MessageExts::ClassMethods
    end

    # Wrapper message for `bool`.
    #
    # The JSON representation for `BoolValue` is JSON `true` and `false`.
    # @!attribute [rw] value
    #   @return [::Boolean]
    #     The bool value.
    class BoolValue
      include ::Google::Protobuf::MessageExts
      extend ::Google::Protobuf::MessageExts::ClassMethods
    end

    # Wrapper message for `string`.
    #
    # The JSON representation for `StringValue` is JSON string.
    # @!attribute [rw] value
    #   @return [::String]
    #     The string value.
    class StringValue
      include ::Google::Protobuf::MessageExts
      extend ::Google::Protobuf::MessageExts::ClassMethods
    end

    # Wrapper message for `bytes`.
    #
    # The JSON representation for `BytesValue` is JSON string.
    # @!attribute [rw] value
    #   @return [::String]
    #     The bytes value.
    class BytesValue
      include ::Google::Protobuf::MessageExts
      extend ::Google::Protobuf::MessageExts::ClassMethods
    end
  end
end
| 29.754098 | 74 | 0.654821 |
5d78f212cd4c234b64a056b7642270fe403bff7c | 17,589 | # Base Ruboto stuff and dependencies
require 'ruboto'
# Command-specific dependencies
require 'ruboto/sdk_versions'
require 'ruboto/util/asset_copier'
require 'ruboto/util/log_action'
require 'ruboto/util/xml_element'
require 'ruboto/util/code_formatting'
require 'ruboto/util/build'
require 'ruboto/util/verify'
require 'ruboto/util/scan_in_api'
require 'ruboto/core_ext/array'
require 'ruboto/core_ext/object'
module Ruboto
module Commands
module Base
include Ruboto::Util::Build
include Ruboto::SdkVersions
include Ruboto::Util::Verify
API_LEVEL_PATTERN = /^(android|google_apis)-(\d+)$/
API_NUMBER_PATTERN = /(\d+)/
VERSION_HELP_TEXT = "(e.g., 'android-19' or '19' for kitkat, " \
"'google_apis-23' for Android 6.0 with Google APIs)"
def self.main
Main do
mode 'init' do
require 'ruboto/util/update'
include Ruboto::Util::LogAction
include Ruboto::Util::Build
include Ruboto::Util::Update
option('path') {
# argument :required
description 'Path to where you want your app. Defaults to the last part of the package name.'
}
option('with-jruby') {
description 'Install the JRuby jars in your libs directory. Optionally set the JRuby version to install. Otherwise the latest available version is installed.'
argument :optional
cast { |v| Gem::Version.new(v) }
validate { |v| Gem::Version.correct?(v) }
}
def run
path = params['path'].value || Dir.getwd
with_jruby = params['with-jruby'].value
with_jruby = '9.2.9.0' unless with_jruby.is_a?(Gem::Version)
root = File.expand_path(path)
abort "Path (#{path}) must be to a directory that already exists." unless Dir.exist?(root)
puts "\nInitializing Android app in #{root}..."
Dir.chdir root do
update_assets
update_ruboto true
# update_icons true
update_classes nil, 'exclude'
# if with_jruby
# update_jruby true, with_jruby
# # update_dx_jar true
# end
update_core_classes 'exclude'
# log_action('Generating the default Activity and script') do
# generate_inheriting_file 'Activity', activity, package
# end
# FileUtils.touch 'bin/classes2.dex'
end
puts "\nRuboto app in #{path} initialized."
end
end
mode 'gen' do
require 'ruboto/util/update'
mode 'jruby' do
include Ruboto::Util::LogAction
include Ruboto::Util::Build
include Ruboto::Util::Update
argument('version') {
required false
description 'The JRuby version to install.'
cast { |v| Gem::Version.new(v) }
validate { |v| Gem::Version.correct?(v) }
}
def run
update_jruby true, params['version'].value
end
end
mode 'class' do
include Ruboto::Util::Build
argument('class') {
required
alternatives = Dir[File.join(Ruboto::ASSETS, "#{JAVA_SRC_DIR}/Inheriting*.java")].map { |f| File.basename(f)[10..-6] } - %w(Class)
description "the Android Class that you want: #{alternatives[0..-2].map { |c| "#{c}, " }}or #{alternatives[-1]}"
validate { |v| alternatives.include? v }
}
option('script_name') {
argument :required
description 'name of the ruby script that this class will execute. Should end in .rb. Optional.'
}
option('name') {
required
argument :required
description 'name of the class (and file). Should be CamelCase'
}
def run
name = params['name'].value
name[0..0] = name[0..0].upcase
script_name = params['script_name'].value || "#{underscore(name)}.rb"
klass = params['class'].value
generate_inheriting_file klass, name, verify_package, script_name
app_element = verify_manifest.elements['application']
if klass == 'Activity' || klass == 'Service'
tag = klass.downcase
if app_element.elements["#{tag}[@android:name='#{name}']"]
puts "#{klass} already present in manifest."
else
app_element.add_element tag, {'android:name' => "#{"#{verify_package}." if klass == 'Service'}#{name}"}
save_manifest
puts "Added #{tag} to manifest."
end
end
end
end
mode 'subclass' do
include Ruboto::Util::Build
argument('class') {
required
description 'the Android Class that you want to subclass (e.g., package.Class).'
}
option('name') {
required
argument :required
description 'name of the class (and file). Should be CamelCase'
}
option('package') {
argument :required
description 'package for the new class (if not specified, uses project package)'
}
option('method_base') {
required
validate { |i| %w(all on none abstract).include?(i) }
argument :required
description 'the base set of methods to generate (adjusted with method_include and method_exclude): all, none, abstract, on (e.g., onClick)'
}
option('method_include') {
argument :required
defaults ''
description 'additional methods to add to the base list'
}
option('method_exclude') {
argument :required
defaults ''
description 'methods to remove from the base list'
}
option('implements') {
required
argument :required
defaults ''
description 'comma separated list interfaces to implement'
}
option('force') {
argument :required
validate { |i| %w(include exclude).include?(i) }
description "force handling of added and deprecated methods (values: 'include' or 'exclude') unless individually included or excluded"
}
def run
generate_inheriting_file 'Class', params['name'].value
generate_subclass_or_interface(
%w(class name package method_base method_include method_exclude implements force).inject({}) { |h, i| h[i.to_sym] = params[i].value; h })
end
end
# `ruboto gen interface` -- DEPRECATED alias for `ruboto gen subclass`;
# kept for backward compatibility and delegates to the same generators.
mode 'interface' do
include Ruboto::Util::Build
argument('interface') {
required
description 'the Android Interface that you want to implement (e.g., package.Interface).'
}
option('name') {
required
argument :required
description 'name of the class (and file) that will implement the interface. Should be CamelCase'
}
option('package') {
argument :required
description 'package for the new class (if not specified, uses project package)'
}
option('force') {
argument :required
validate { |i| %w(include exclude).include?(i) }
description "force added and deprecated interfaces (values: 'include' or 'exclude')"
}
def run
# FIXME(uwe): DEPRECATED! Remove before Ruboto version 1.0.0.
puts "\nThe use of \"ruboto gen interface\" has been deprecated. Please use\n\n    ruboto gen subclass\n\ninstead.\n\n"
generate_inheriting_file 'Class', params['name'].value
generate_subclass_or_interface %w(interface name package force).inject({}) { |h, i| h[i.to_sym] = params[i].value; h }
end
end
# `ruboto gen core` -- (re)generate the Ruby<->Java bridge classes for the
# listed Android framework classes in the current project.
mode 'core' do
include Ruboto::Util::Build
argument('class') {
required
validate { |i| %w(Activity Service BroadcastReceiver View PreferenceActivity TabActivity OnClickListener OnItemClickListener OnItemSelectedListener all).include?(i) }
description "Activity, Service, BroadcastReceiver, View, OnClickListener, OnItemClickListener, OnItemSelectedListener, or all (default = all); Other activities not included in 'all': PreferenceActivity, TabActivity"
}
# Base set of callback stubs to generate; trimmed/extended by the two options below.
option('method_base') {
required
argument :required
validate { |i| %w(all on none).include?(i) }
defaults 'on'
description 'the base set of methods to generate (adjusted with method_include and method_exclude): all, none, on (e.g., onClick)'
}
option('method_include') {
required
argument :required
defaults ''
description 'additional methods to add to the base list'
}
option('method_exclude') {
required
argument :required
defaults ''
description 'methods to remove from the base list'
}
option('implements') {
required
argument :required
defaults ''
description "for classes only, interfaces to implement (cannot be used with 'gen core all')"
}
option('force') {
argument :required
validate { |i| %w(include exclude).include?(i) }
description "force handling of added and deprecated methods (values: 'include' or 'exclude') unless individually included or excluded"
}
def run
# 'implements' only makes sense for the concrete component classes, not for
# listener interfaces or the 'all' pseudo-class -- reject it otherwise.
abort("specify 'implements' only for Activity, Service, BroadcastReceiver, PreferenceActivity, or TabActivity") unless %w(Activity Service BroadcastReceiver PreferenceActivity TabActivity).include?(params['class'].value) or params['implements'].value == ''
generate_core_classes [:class, :method_base, :method_include, :method_exclude, :implements, :force].inject({}) { |h, i| h[i] = params[i.to_s].value; h }
end
end
end
# `ruboto update` -- bring an existing project's generated artifacts up to
# date with this gem version (sub-modes: app, jruby).
mode 'update' do
require 'ruboto/util/update'
include Ruboto::Util::LogAction
include Ruboto::Util::Update
# Update every generated artifact of an app project: Android config, tests,
# assets, Ruboto classes, dx jar, JRuby jars, manifest, icons, bundle.
mode 'app' do
# FIXME(uwe): Change to cast to integer for better comparison
option('target', 't') {
argument :required
description "Android version to target #{VERSION_HELP_TEXT}"
# Accept a bare API number and normalize it to the "android-NN" form.
cast { |t| t =~ API_NUMBER_PATTERN ? "android-#$1" : t }
validate { |t| t =~ API_LEVEL_PATTERN }
}
option('with-jruby') {
description 'Install the JRuby jars in your libs directory. Optionally set the JRuby version to install. Otherwise the latest available version is installed. If the JRuby jars are already present in your project, this option is implied.'
argument :optional
cast { |v| Gem::Version.new(v) }
validate { |v| Gem::Version.correct?(v) }
}
option('force') {
description "force an update even if the version hasn't changed"
}
def run
force = params['force'].value
old_version = read_ruboto_version
# Projects generated before UPDATE_VERSION_LIMIT must first be migrated by
# that exact older gem version before this version can update them.
if old_version && Gem::Version.new(old_version) < Gem::Version.new(Ruboto::UPDATE_VERSION_LIMIT)
puts "Detected old Ruboto version: #{old_version}"
puts "Will use Ruboto #{Ruboto::UPDATE_VERSION_LIMIT} to update it first."
# `gem query -i` exits zero when that gem version is already installed;
# install it only when the query fails.
# NOTE(review): `$? == 0` compares a Process::Status with an Integer;
# `$?.success?` would be the clearer idiom -- behavior kept as-is here.
`gem query -i -n ruboto -v #{Ruboto::UPDATE_VERSION_LIMIT}`
system "gem install ruboto -v #{Ruboto::UPDATE_VERSION_LIMIT}" unless $? == 0
raise "Install of Ruboto #{Ruboto::UPDATE_VERSION_LIMIT} failed!" unless $? == 0
# The "_X.Y.Z_" argument makes RubyGems run that exact executable version.
system "ruboto _#{Ruboto::UPDATE_VERSION_LIMIT}_ update app"
raise "Ruboto update app to #{Ruboto::UPDATE_VERSION_LIMIT} failed!" unless $? == 0
end
if (target = params['target'].value)
unless target =~ API_LEVEL_PATTERN
abort "Target must match #{API_LEVEL_PATTERN}: got #{target}"
end
# $2 is the numeric API level captured by the API_LEVEL_PATTERN match above.
unless $2.to_i >= MINIMUM_SUPPORTED_SDK_LEVEL
abort "Minimum Android api level is #{MINIMUM_SUPPORTED_SDK}: got #{target}"
end
target_level = target[API_NUMBER_PATTERN]
update_android(target_level)
update_test force, target_level
else
update_android
update_test force
end
update_assets old_version
update_ruboto force
update_classes old_version, force
update_dx_jar force
update_jruby force, params['with-jruby'].value
update_manifest nil, nil, force
update_icons force
update_core_classes 'exclude'
update_bundle
end
end
# Update (or install) just the JRuby jars in the project's libs directory.
mode 'jruby' do
argument('version') {
required false
description 'The JRuby version to install. The jruby-jars gem of the same version should be installed on your system already.'
cast { |v| Gem::Version.new(v) }
validate { |v| Gem::Version.correct?(v) }
}
option('force') {
description "force an update even if the version hasn't changed"
}
def run
update_jruby(params['force'].value, params['version'].value, true) || abort
end
end
end
# `ruboto setup` -- install/update the Android SDK components needed to
# build Ruboto projects for the requested API level(s).
mode 'setup' do
require 'ruboto/util/setup'
include Ruboto::Util::Setup
option('target', 't') {
description "sets the target Android API level to set up for #{VERSION_HELP_TEXT}"
argument :required
default DEFAULT_TARGET_SDK
# arity -1: option may be repeated; all occurrences are read via
# params['target'].values in `run` below.
arity -1
cast { |t| t =~ API_NUMBER_PATTERN ? "android-#$1" : t }
validate { |t| t =~ API_LEVEL_PATTERN }
}
option('yes', 'y') {
description 'answer "yes" to all interactive questions. Will automatically install needed components.'
}
option('update', 'u') {
description 'updates intel haxm'
}
option('upgrade') {
description 'DEPRECATED: Use --update instead'
}
def run
update = params['update'].value
# FIXME(uwe): Remove after Ruboto 1.5.0 is released
update ||= params['upgrade'].value
# EMXIF
setup_ruboto(params['yes'].value, params['target'].values, update)
end
end
# `ruboto emulator` -- start an Android emulator for the given API level.
mode 'emulator' do
require 'ruboto/util/emulator'
include Ruboto::Util::Emulator
extend Ruboto::Util::Verify
# Project's API level (when run inside a project) used as the default target.
api_level = project_api_level
option('target', 't') {
extend Ruboto::Util::Emulator
description 'sets the target Android API level for the emulator'
examples Ruboto::SdkVersions::API_LEVEL_TO_VERSION.keys.join(', ')
# Only mandatory when no project default could be detected above.
required unless api_level
argument :required
default(api_level) if api_level
cast { |t| t =~ API_NUMBER_PATTERN ? "android-#$1" : t }
# Also require a known SDK name for the level ($2 is the numeric level
# captured by the API_LEVEL_PATTERN match).
validate { |t| t =~ API_LEVEL_PATTERN && sdk_level_name($2.to_i) }
}
option('no-snapshot', 's') {
description 'do not use a snapshot when starting the emulator'
}
def run
start_emulator(params['target'].value, params['no-snapshot'].value)
end
end
# Top-level --version flag; handled by the bare `ruboto` run method below.
option 'version' do
description 'display ruboto version'
end
# just running `ruboto`
# Entry point when `ruboto` is invoked without a sub-command: print the
# installed gem version (with --version) or a short usage banner.
def run
  version = Gem::Specification.find_by_name('ruboto').version.version
  if params['version'].value
    puts version
  else
    # Fix: the banner was missing the closing backtick after `ruboto init`
    # (compare the correctly quoted `ruboto --help` on the next line).
    puts <<EOF
Ruboto -- Ruby for Android #{version}
Execute `ruboto init` to initialize an Android Studio project with Ruboto
Execute `ruboto --help` for other options
EOF
  end
end
end
end
end
end
end
| 38.827815 | 272 | 0.530104 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.