hexsha (string, length 40) | size (int64, 2 to 1.01M) | content (string, 2 to 1.01M) | avg_line_length (float64, 1.5 to 100) | max_line_length (int64, 2 to 1k) | alphanum_fraction (float64, 0.25 to 1) |
---|---|---|---|---|---|
d573d9ea9c7a12e60a82e364ac5f0435072d7062 | 102 | json.array!(@collection) do |cc_question|
json.id cc_question.id
json.label cc_question.label
end
| 20.4 | 41 | 0.77451 |
79cdcf074708cc405a46a9b9f3483658100eab37 | 6,335 | #-- encoding: UTF-8
#-- copyright
# OpenProject is a project management system.
# Copyright (C) 2012-2017 the OpenProject Foundation (OPF)
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License version 3.
#
# OpenProject is a fork of ChiliProject, which is a fork of Redmine. The copyright follows:
# Copyright (C) 2006-2017 Jean-Philippe Lang
# Copyright (C) 2010-2013 the ChiliProject Team
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# See doc/COPYRIGHT.rdoc for more details.
#++
module API
module V3
module Queries
module Filters
class QueryFilterInstanceRepresenter < ::API::Decorators::Single
include API::Decorators::LinkedResource
def initialize(model)
super(model, current_user: nil, embed_links: true)
end
link :schema do
api_v3_paths.query_filter_instance_schema(converted_name)
end
resource_link :filter,
getter: ->(*) {
{
href: api_v3_paths.query_filter(converted_name),
title: name
}
},
setter: ->(**) {
# nothing for now, handled in QueryRepresenter
}
resource_link :operator,
getter: ->(*) {
hash = {
href: api_v3_paths.query_operator(CGI.escape(represented.operator))
}
hash[:title] = represented.operator_class.human_name if represented.operator_class.present?
hash
},
setter: ->(fragment:, **) {
next unless fragment
represented.operator = ::API::Utilities::ResourceLinkParser
.parse_id fragment["href"],
property: 'operator',
expected_version: '3',
expected_namespace: 'queries/operators'
}
resources :values,
link: ->(*) {
next unless represented.ar_object_filter?
represented.value_objects_hash.map do |value_object|
value_object[:href] ||= begin
api_v3_paths.send(value_object[:identifier], value_object[:id])
rescue => e
Rails.logger.error "Failed to get href for value_object #{value_object}: #{e}"
nil
end
{
href: value_object[:href],
title: value_object[:name]
}
end
},
setter: ->(fragment:, **) {
next unless fragment
if represented.ar_object_filter?
set_link_values(fragment)
else
set_property_values(fragment)
end
},
getter: ->(*) {
if represented.respond_to?(:custom_field) &&
represented.custom_field.field_format == 'bool'
represented.values.map do |value|
if value == CustomValue::BoolStrategy::DB_VALUE_TRUE
true
else
false
end
end
else
represented.values
end
},
skip_render: ->(*) { represented.ar_object_filter? },
embedded: false
property :name,
exec_context: :decorator,
writeable: false
def _type
"#{converted_name.camelize}QueryFilter"
end
def name
represented.human_name
end
def set_link_values(vals)
represented.values = vals.map do |value|
::API::Utilities::ResourceLinkParser.parse(value["href"])[:id]
end
end
def set_property_values(vals)
represented.values = if represented.respond_to?(:custom_field) &&
represented.custom_field.field_format == 'bool'
vals.map do |value|
if value
CustomValue::BoolStrategy::DB_VALUE_TRUE
else
CustomValue::BoolStrategy::DB_VALUE_FALSE
end
end
else
vals
end
end
def converted_name
::API::Utilities::PropertyNameConverter.from_ar_name(represented.name)
end
def query_filter_instance_links_representer(represented)
::API::V3::Queries::Filters::QueryFilterInstanceLinksRepresenter.new represented, current_user: current_user
end
end
end
end
end
end
| 38.393939 | 120 | 0.474665 |
1df1cc9f3217d34574bc00d53a902db9a168819d | 389 | #!/usr/bin/env ruby
require 'mongo_percolator'
MongoPercolator.connect
class TimeTest2
include MongoMapper::Document
key :timeid, BSON::ObjectId
key :counter, Integer
before_save :update_timeid
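# Assign a fresh ObjectId on every save; an ObjectId's leading bytes encode
# its creation time, which is presumably what this timing test exercises.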
def update_timeid
self.timeid = BSON::ObjectId.new
end
end
1.upto(10000) do |i|
TimeTest2.create! :counter => i
end
puts "#{TimeTest2.count} TimeTest2 documents inserted"
| 16.913043 | 54 | 0.745501 |
f8b1c1e979943a37ee53df079a0e46cf15d7bcfa | 1,179 | default["apt-cacher-ng"][:secret_file] = "/etc/chef/secrets/apt-cacher-ng"
default["apt-cacher-ng"][:confdir] = "/etc/apt-cacher-ng"
default["apt-cacher-ng"][:cachedir] = "/var/cache/apt-cacher-ng"
default["apt-cacher-ng"][:logdir] = "/var/log/apt-cacher-ng"
default["apt-cacher-ng"][:port] = "3142"
default["apt-cacher-ng"][:reportpage] = "acng-report.html"
default["apt-cacher-ng"][:extreshold] = 4
default["apt-cacher-ng"][:mappings] = [
{
:name => "debrep",
:value => "file:deb_mirror*.gz /debian ; file:backends_debian",
},
{
:name => "uburep",
:value => "file:ubuntu_mirrors /ubuntu ; file:backends_ubuntu",
},
{
:name => "debvol",
:value => "file:debvol_mirror*.gz /debian-volatile ; file:backends_debvol",
},
{
:name => "cygwin",
:value => "file:cygwin_mirrors /cygwin",
},
{
:name => "sfnet",
:value => "file:sfnet_mirrors",
},
{
:name => "alxrep",
:value => "file:archlx_mirrors /archlinux",
},
{
:name => "fedora",
:value => "file:fedora_mirrors",
},
{
:name => "epel",
:value => "file:epel_mirrors",
},
{
:name => "slrep",
:value => "file:sl_mirrors",
},
]
| 24.5625 | 79 | 0.581001 |
1ab6b72e1834a5f34d9cc68900cd32d19054f13b | 2,027 | # The test environment is used exclusively to run your application's
# test suite. You never need to work with it otherwise. Remember that
# your test database is "scratch space" for the test suite and is wiped
# and recreated between test runs. Don't rely on the data there!
Rails.application.configure do
# Settings specified here will take precedence over those in config/application.rb.
config.cache_classes = false
config.action_view.cache_template_loading = true
# Do not eager load code on boot. This avoids loading your whole application
# just for the purpose of running a single test. If you are using a tool that
# preloads Rails for running tests, you may have to set it to true.
config.eager_load = false
# Configure public file server for tests with Cache-Control for performance.
config.public_file_server.enabled = true
config.public_file_server.headers = {
'Cache-Control' => "public, max-age=#{1.hour.to_i}"
}
# Show full error reports and disable caching.
config.consider_all_requests_local = true
config.action_controller.perform_caching = false
config.cache_store = :null_store
# Raise exceptions instead of rendering exception templates.
config.action_dispatch.show_exceptions = false
# Disable request forgery protection in test environment.
config.action_controller.allow_forgery_protection = false
# Store uploaded files on the local file system in a temporary directory.
config.active_storage.service = :test
config.action_mailer.perform_caching = false
# Tell Action Mailer not to deliver emails to the real world.
# The :test delivery method accumulates sent emails in the
# ActionMailer::Base.deliveries array.
config.action_mailer.delivery_method = :test
config.action_mailer.default_url_options = {host: 'localhost:3000'}
# Print deprecation notices to the stderr.
config.active_support.deprecation = :stderr
# Raises error for missing translations.
# config.action_view.raise_on_missing_translations = true
end
| 39.745098 | 85 | 0.77553 |
f79952afcc57a6993b649d987d9a07974cdbb274 | 37,379 | #!/usr/bin/env ruby
require 'spec_helper'
describe Oregano::Util do
include OreganoSpec::Files
# Discriminator for tests that attempt to unset HOME since that, for reasons currently unknown,
# doesn't work in Ruby >= 2.4.0
def self.gte_ruby_2_4
@gte_ruby_2_4 ||= SemanticOregano::Version.parse(RUBY_VERSION) >= SemanticOregano::Version.parse('2.4.0')
end
if Oregano.features.microsoft_windows?
def set_mode(mode, file)
Oregano::Util::Windows::Security.set_mode(mode, file)
end
def get_mode(file)
Oregano::Util::Windows::Security.get_mode(file) & 07777
end
else
def set_mode(mode, file)
File.chmod(mode, file)
end
def get_mode(file)
Oregano::FileSystem.lstat(file).mode & 07777
end
end
describe "#withenv" do
let(:mode) { Oregano.features.microsoft_windows? ? :windows : :posix }
before :each do
@original_path = ENV["PATH"]
@new_env = {:PATH => "/some/bogus/path"}
end
it "should change environment variables within the block then reset environment variables to their original values" do
Oregano::Util.withenv @new_env, mode do
expect(ENV["PATH"]).to eq("/some/bogus/path")
end
expect(ENV["PATH"]).to eq(@original_path)
end
it "should reset environment variables to their original values even if the block fails" do
begin
Oregano::Util.withenv @new_env, mode do
expect(ENV["PATH"]).to eq("/some/bogus/path")
raise "This is a failure"
end
rescue
end
expect(ENV["PATH"]).to eq(@original_path)
end
it "should reset environment variables even when they are set twice" do
# Setting Path & Environment parameters in Exec type can cause weirdness
@new_env["PATH"] = "/someother/bogus/path"
Oregano::Util.withenv @new_env, mode do
# When assigning duplicate keys, can't guarantee order of evaluation
expect(ENV["PATH"]).to match(/\/some.*\/bogus\/path/)
end
expect(ENV["PATH"]).to eq(@original_path)
end
it "should remove any new environment variables after the block ends" do
@new_env[:FOO] = "bar"
ENV["FOO"] = nil
Oregano::Util.withenv @new_env, mode do
expect(ENV["FOO"]).to eq("bar")
end
expect(ENV["FOO"]).to eq(nil)
end
end
describe "#withenv on POSIX", :unless => Oregano.features.microsoft_windows? do
it "should preserve case" do
# start with lower case key,
env_key = SecureRandom.uuid.downcase
begin
original_value = 'hello'
ENV[env_key] = original_value
new_value = 'goodbye'
Oregano::Util.withenv({env_key.upcase => new_value}, :posix) do
expect(ENV[env_key]).to eq(original_value)
expect(ENV[env_key.upcase]).to eq(new_value)
end
expect(ENV[env_key]).to eq(original_value)
expect(ENV[env_key.upcase]).to be_nil
ensure
ENV.delete(env_key)
end
end
end
describe "#withenv on Windows", :if => Oregano.features.microsoft_windows? do
let(:process) { Oregano::Util::Windows::Process }
it "should ignore case" do
# start with lower case key, ensuring string is not entirely numeric
env_key = SecureRandom.uuid.downcase + 'a'
begin
original_value = 'hello'
ENV[env_key] = original_value
new_value = 'goodbye'
Oregano::Util.withenv({env_key.upcase => new_value}, :windows) do
expect(ENV[env_key]).to eq(new_value)
expect(ENV[env_key.upcase]).to eq(new_value)
end
expect(ENV[env_key]).to eq(original_value)
expect(ENV[env_key.upcase]).to eq(original_value)
ensure
ENV.delete(env_key)
end
end
def withenv_utf8(&block)
env_var_name = SecureRandom.uuid
utf_8_bytes = [225, 154, 160] # rune ᚠ
utf_8_key = env_var_name + utf_8_bytes.pack('c*').force_encoding(Encoding::UTF_8)
utf_8_value = utf_8_key + 'value'
codepage_key = utf_8_key.dup.force_encoding(Encoding.default_external)
Oregano::Util.withenv({utf_8_key => utf_8_value}, :windows) do
# the true Windows environment APIs see the variables correctly
expect(process.get_environment_strings[utf_8_key]).to eq(utf_8_value)
# the strings contain the same bytes, but have different Ruby metadata
expect(utf_8_key.bytes.to_a).to eq(codepage_key.bytes.to_a)
yield utf_8_key, utf_8_value, codepage_key
end
# real environment shouldn't have env var anymore
expect(process.get_environment_strings[utf_8_key]).to eq(nil)
end
# document buggy Ruby behavior here for https://bugs.ruby-lang.org/issues/8822
# Ruby retrieves / stores ENV names in the current codepage
# when these tests no longer pass, Ruby has fixed its bugs and workarounds can be removed
# interestingly we would expect some of these tests to fail when codepage is 65001
# but instead the env values are in Encoding::ASCII_8BIT!
it "works around Ruby bug 8822 (which fails to preserve UTF-8 properly when accessing ENV) (Ruby <= 2.1) ",
:if => ((RUBY_VERSION =~ /^(1\.|2\.0\.|2\.1\.)/) && Oregano.features.microsoft_windows?) do
withenv_utf8 do |utf_8_key, utf_8_value, codepage_key|
# both a string in UTF-8 and current codepage are deemed valid keys to the hash
# which is because Ruby compares the BINARY versions of the string, but ignores encoding
expect(ENV.key?(codepage_key)).to eq(true)
expect(ENV.key?(utf_8_key)).to eq(true)
# Ruby's ENV.keys has slightly different behavior than ENV.key?(key)
# the keys collection in 2.1 has a string with the correct bytes
# (codepage_key / utf_8_key have same bytes for the sake of searching)
found = ENV.keys.find { |k| k.bytes == codepage_key.bytes }
# but the string is actually a binary string
expect(found.encoding).to eq(Encoding::BINARY)
# meaning we can't use include? to find it in either UTF-8 or codepage encoding
expect(ENV.keys.include?(codepage_key)).to eq(false)
expect(ENV.keys.include?(utf_8_key)).to eq(false)
# and can only search with a BINARY encoded string
expect(ENV.keys.include?(utf_8_key.dup.force_encoding(Encoding::BINARY))).to eq(true)
# similarly the value stored at the real key is in current codepage
# but won't match real UTF-8 value
env_value = ENV[utf_8_key]
expect(env_value).to_not eq(utf_8_value)
expect(env_value.encoding).to_not eq(Encoding::UTF_8)
# but it can be forced back to UTF-8 to make it match.. ugh
converted_value = ENV[utf_8_key].dup.force_encoding(Encoding::UTF_8)
expect(converted_value).to eq(utf_8_value)
end
end
# but in 2.3, the behavior is mostly correct when external codepage is 65001 / UTF-8
it "works around Ruby bug 8822 (which fails to preserve UTF-8 properly when accessing ENV) (Ruby >= 2.3.x) ",
:if => ((match = RUBY_VERSION.match(/^2\.(\d+)\./)) && match.captures[0].to_i >= 3 && Oregano.features.microsoft_windows?) do
raise 'This test requires a non-UTF8 codepage' if Encoding.default_external == Encoding::UTF_8
withenv_utf8 do |utf_8_key, utf_8_value, codepage_key|
# Ruby 2.3 fixes access by the original UTF-8 key, and behaves differently than 2.1
# keying by local codepage will work only when the UTF-8 can be converted to local codepage
# the key selected for this test contains characters unavailable to a local codepage, hence doesn't work
# On Japanese Windows (Code Page 932) this test resolves as true;
# otherwise the key selected for this test contains characters
# unavailable to a local codepage, hence doesn't work
# HACK: tech debt to replace once PUP-7019 is understood
should_be_found = (Encoding.default_external == Encoding::CP932)
expect(ENV.key?(codepage_key)).to eq(should_be_found)
expect(ENV.key?(utf_8_key)).to eq(true)
# Ruby's ENV.keys has slightly different behavior than ENV.key?(key), and 2.3 differs from 2.1
# (codepage_key / utf_8_key have same bytes for the sake of searching)
found = ENV.keys.find { |k| k.bytes == codepage_key.bytes }
# the keys collection in 2.3 does not have a string with the correct bytes!
# a corrupt version of the key exists with the bytes [225, 154, 160] replaced with [63]!
expect(found).to be_nil
# given the key is corrupted, include? cannot be used to find it in either UTF-8 or codepage encoding
expect(ENV.keys.include?(codepage_key)).to eq(false)
expect(ENV.keys.include?(utf_8_key)).to eq(false)
# The value stored at the UTF-8 key is a corrupted current codepage string and won't match UTF-8 value
# again the bytes [225, 154, 160] have irreversibly been changed to [63]!
env_value = ENV[utf_8_key]
expect(env_value).to_not eq(utf_8_value)
expect(env_value.encoding).to_not eq(Encoding::UTF_8)
# the ENV value returned will be in the local codepage which may or may not be able to be
# encoded to UTF8. Our test UTF8 data is not convertible to non-Unicode codepages
converted_value = ENV[utf_8_key].dup.force_encoding(Encoding::UTF_8)
expect(converted_value).to_not eq(utf_8_value)
end
end
it "should preseve existing environment and should not corrupt UTF-8 environment variables" do
env_var_name = SecureRandom.uuid
utf_8_bytes = [225, 154, 160] # rune ᚠ
utf_8_str = env_var_name + utf_8_bytes.pack('c*').force_encoding(Encoding::UTF_8)
env_var_name_utf_8 = utf_8_str
begin
# UTF-8 name and value
process.set_environment_variable(env_var_name_utf_8, utf_8_str)
# ASCII name / UTF-8 value
process.set_environment_variable(env_var_name, utf_8_str)
original_keys = process.get_environment_strings.keys.to_a
Oregano::Util.withenv({}, :windows) { }
env = process.get_environment_strings
expect(env[env_var_name]).to eq(utf_8_str)
expect(env[env_var_name_utf_8]).to eq(utf_8_str)
expect(env.keys.to_a).to eq(original_keys)
ensure
process.set_environment_variable(env_var_name_utf_8, nil)
process.set_environment_variable(env_var_name, nil)
end
end
end
describe "#absolute_path?" do
describe "on posix systems", :if => Oregano.features.posix? do
it "should default to the platform of the local system" do
expect(Oregano::Util).to be_absolute_path('/foo')
expect(Oregano::Util).not_to be_absolute_path('C:/foo')
end
end
describe "on windows", :if => Oregano.features.microsoft_windows? do
it "should default to the platform of the local system" do
expect(Oregano::Util).to be_absolute_path('C:/foo')
expect(Oregano::Util).not_to be_absolute_path('/foo')
end
end
describe "when using platform :posix" do
%w[/ /foo /foo/../bar //foo //Server/Foo/Bar //?/C:/foo/bar /\Server/Foo /foo//bar/baz].each do |path|
it "should return true for #{path}" do
expect(Oregano::Util).to be_absolute_path(path, :posix)
end
end
%w[. ./foo \foo C:/foo \\Server\Foo\Bar \\?\C:\foo\bar \/?/foo\bar \/Server/foo foo//bar/baz].each do |path|
it "should return false for #{path}" do
expect(Oregano::Util).not_to be_absolute_path(path, :posix)
end
end
end
describe "when using platform :windows" do
%w[C:/foo C:\foo \\\\Server\Foo\Bar \\\\?\C:\foo\bar //Server/Foo/Bar //?/C:/foo/bar /\?\C:/foo\bar \/Server\Foo/Bar c:/foo//bar//baz].each do |path|
it "should return true for #{path}" do
expect(Oregano::Util).to be_absolute_path(path, :windows)
end
end
%w[/ . ./foo \foo /foo /foo/../bar //foo C:foo/bar foo//bar/baz].each do |path|
it "should return false for #{path}" do
expect(Oregano::Util).not_to be_absolute_path(path, :windows)
end
end
end
end
describe "#path_to_uri" do
# different UTF-8 widths
# 1-byte A
# 2-byte ۿ - http://www.fileformat.info/info/unicode/char/06ff/index.htm - 0xDB 0xBF / 219 191
# 3-byte ᚠ - http://www.fileformat.info/info/unicode/char/16A0/index.htm - 0xE1 0x9A 0xA0 / 225 154 160
# 4-byte 𠜎 - http://www.fileformat.info/info/unicode/char/2070E/index.htm - 0xF0 0xA0 0x9C 0x8E / 240 160 156 142
let (:mixed_utf8) { "A\u06FF\u16A0\u{2070E}" } # Aۿᚠ𠜎
let (:mixed_utf8_urlencoded) { "A%DB%BF%E1%9A%A0%F0%A0%9C%8E" }
%w[. .. foo foo/bar foo/../bar].each do |path|
it "should reject relative path: #{path}" do
expect { Oregano::Util.path_to_uri(path) }.to raise_error(Oregano::Error)
end
end
it "should perform URI escaping" do
expect(Oregano::Util.path_to_uri("/foo bar").path).to eq("/foo%20bar")
end
it "should properly URI encode + and space in path" do
expect(Oregano::Util.path_to_uri("/foo+foo bar").path).to eq("/foo+foo%20bar")
end
# reserved characters are different for each part
# https://web.archive.org/web/20151229061347/http://blog.lunatech.com/2009/02/03/what-every-web-developer-must-know-about-url-encoding#Thereservedcharactersaredifferentforeachpart
# "?" is allowed unescaped anywhere within a query part,
# "/" is allowed unescaped anywhere within a query part,
# "=" is allowed unescaped anywhere within a path parameter or query parameter value, and within a path segment,
# ":@-._~!$&'()*+,;=" are allowed unescaped anywhere within a path segment part,
# "/?:@-._~!$&'()*+,;=" are allowed unescaped anywhere within a fragment part.
it "should properly URI encode + and space in path and query" do
path = "/foo+foo bar?foo+foo bar"
uri = Oregano::Util.path_to_uri(path)
# Ruby 1.9.3 URI#to_s has a bug that returns ASCII always
# despite parts being UTF-8 strings
expected_encoding = RUBY_VERSION == '1.9.3' ? Encoding::ASCII : Encoding::UTF_8
expect(uri.to_s.encoding).to eq(expected_encoding)
expect(uri.path).to eq("/foo+foo%20bar")
# either + or %20 is correct for an encoded space in query
# + is usually used for backward compatibility, but %20 is preferred for compat with URI.unescape
expect(uri.query).to eq("foo%2Bfoo%20bar")
# complete roundtrip
expect(URI.unescape(uri.to_s)).to eq("file:#{path}")
expect(URI.unescape(uri.to_s).encoding).to eq(expected_encoding)
end
it "should perform UTF-8 URI escaping" do
uri = Oregano::Util.path_to_uri("/#{mixed_utf8}")
expect(uri.path.encoding).to eq(Encoding::UTF_8)
expect(uri.path).to eq("/#{mixed_utf8_urlencoded}")
end
describe "when using platform :posix" do
before :each do
Oregano.features.stubs(:posix).returns true
Oregano.features.stubs(:microsoft_windows?).returns false
end
%w[/ /foo /foo/../bar].each do |path|
it "should convert #{path} to URI" do
expect(Oregano::Util.path_to_uri(path).path).to eq(path)
end
end
end
describe "when using platform :windows" do
before :each do
Oregano.features.stubs(:posix).returns false
Oregano.features.stubs(:microsoft_windows?).returns true
end
it "should normalize backslashes" do
expect(Oregano::Util.path_to_uri('c:\\foo\\bar\\baz').path).to eq('/' + 'c:/foo/bar/baz')
end
%w[C:/ C:/foo/bar].each do |path|
it "should convert #{path} to absolute URI" do
expect(Oregano::Util.path_to_uri(path).path).to eq('/' + path)
end
end
%w[share C$].each do |path|
it "should convert UNC #{path} to absolute URI" do
uri = Oregano::Util.path_to_uri("\\\\server\\#{path}")
expect(uri.host).to eq('server')
expect(uri.path).to eq('/' + Oregano::Util.uri_encode(path))
end
end
end
end
describe "#uri_query_encode" do
# different UTF-8 widths
# 1-byte A
# 2-byte ۿ - http://www.fileformat.info/info/unicode/char/06ff/index.htm - 0xDB 0xBF / 219 191
# 3-byte ᚠ - http://www.fileformat.info/info/unicode/char/16A0/index.htm - 0xE1 0x9A 0xA0 / 225 154 160
# 4-byte 𠜎 - http://www.fileformat.info/info/unicode/char/2070E/index.htm - 0xF0 0xA0 0x9C 0x8E / 240 160 156 142
let (:mixed_utf8) { "A\u06FF\u16A0\u{2070E}" } # Aۿᚠ𠜎
let (:mixed_utf8_urlencoded) { "A%DB%BF%E1%9A%A0%F0%A0%9C%8E" }
it "should perform basic URI escaping that includes space and +" do
expect(Oregano::Util.uri_query_encode("foo bar+foo")).to eq("foo%20bar%2Bfoo")
end
it "should URI encode any special characters: = + <space> & * and #" do
expect(Oregano::Util.uri_query_encode("foo=bar+foo baz&bar=baz qux&special= *&qux=not fragment#")).to eq("foo%3Dbar%2Bfoo%20baz%26bar%3Dbaz%20qux%26special%3D%20%2A%26qux%3Dnot%20fragment%23")
end
[
"A\u06FF\u16A0\u{2070E}",
"A\u06FF\u16A0\u{2070E}".force_encoding(Encoding::BINARY)
].each do |uri_string|
it "should perform UTF-8 URI escaping, even when input strings are not UTF-8" do
uri = Oregano::Util.uri_query_encode(mixed_utf8)
expect(uri.encoding).to eq(Encoding::UTF_8)
expect(uri).to eq(mixed_utf8_urlencoded)
end
end
it "should be usable by URI::parse" do
uri = URI::parse("oregano://server/path?" + Oregano::Util.uri_query_encode(mixed_utf8))
expect(uri.scheme).to eq('oregano')
expect(uri.host).to eq('server')
expect(uri.path).to eq('/path')
expect(uri.query).to eq(mixed_utf8_urlencoded)
end
it "should be usable by URI::Generic.build" do
params = {
:scheme => 'file',
:host => 'foobar',
:path => '/path/to',
:query => Oregano::Util.uri_query_encode(mixed_utf8)
}
uri = URI::Generic.build(params)
expect(uri.scheme).to eq('file')
expect(uri.host).to eq('foobar')
expect(uri.path).to eq("/path/to")
expect(uri.query).to eq(mixed_utf8_urlencoded)
end
end
describe "#uri_encode" do
# different UTF-8 widths
# 1-byte A
# 2-byte ۿ - http://www.fileformat.info/info/unicode/char/06ff/index.htm - 0xDB 0xBF / 219 191
# 3-byte ᚠ - http://www.fileformat.info/info/unicode/char/16A0/index.htm - 0xE1 0x9A 0xA0 / 225 154 160
# 4-byte 𠜎 - http://www.fileformat.info/info/unicode/char/2070E/index.htm - 0xF0 0xA0 0x9C 0x8E / 240 160 156 142
let (:mixed_utf8) { "A\u06FF\u16A0\u{2070E}" } # Aۿᚠ𠜎
let (:mixed_utf8_urlencoded) { "A%DB%BF%E1%9A%A0%F0%A0%9C%8E" }
it "should perform URI escaping" do
expect(Oregano::Util.uri_encode("/foo bar")).to eq("/foo%20bar")
end
[
"A\u06FF\u16A0\u{2070E}",
"A\u06FF\u16A0\u{2070E}".force_encoding(Encoding::BINARY)
].each do |uri_string|
it "should perform UTF-8 URI escaping, even when input strings are not UTF-8" do
uri = Oregano::Util.uri_encode(mixed_utf8)
expect(uri.encoding).to eq(Encoding::UTF_8)
expect(uri).to eq(mixed_utf8_urlencoded)
end
end
it "should treat & and = as delimiters in a query string, but URI encode other special characters: + <space> * and #" do
input = "http://foo.bar.com/path?foo=bar+foo baz&bar=baz qux&special= *&qux=not fragment#"
expected_output = "http://foo.bar.com/path?foo=bar%2Bfoo%20baz&bar=baz%20qux&special=%20%2A&qux=not%20fragment%23"
expect(Oregano::Util.uri_encode(input)).to eq(expected_output)
end
it "should be usable by URI::parse" do
uri = URI::parse(Oregano::Util.uri_encode("oregano://server/path/to/#{mixed_utf8}"))
expect(uri.scheme).to eq('oregano')
expect(uri.host).to eq('server')
expect(uri.path).to eq("/path/to/#{mixed_utf8_urlencoded}")
end
it "should be usable by URI::Generic.build" do
params = {
:scheme => 'file',
:host => 'foobar',
:path => Oregano::Util.uri_encode("/path/to/#{mixed_utf8}")
}
uri = URI::Generic.build(params)
expect(uri.scheme).to eq('file')
expect(uri.host).to eq('foobar')
expect(uri.path).to eq("/path/to/#{mixed_utf8_urlencoded}")
end
describe "when using platform :posix" do
before :each do
Oregano.features.stubs(:posix).returns true
Oregano.features.stubs(:microsoft_windows?).returns false
end
%w[/ /foo /foo/../bar].each do |path|
it "should not replace / in #{path} with %2F" do
expect(Oregano::Util.uri_encode(path)).to eq(path)
end
end
end
describe "with fragment support" do
context "disabled by default" do
it "should encode # as %23 in path" do
encoded = Oregano::Util.uri_encode("/foo bar#fragment")
expect(encoded).to eq("/foo%20bar%23fragment")
end
it "should encode # as %23 in query" do
encoded = Oregano::Util.uri_encode("/foo bar?baz+qux#fragment")
expect(encoded).to eq("/foo%20bar?baz%2Bqux%23fragment")
end
end
context "optionally enabled" do
it "should leave fragment delimiter # after encoded paths" do
encoded = Oregano::Util.uri_encode("/foo bar#fragment", { :allow_fragment => true })
expect(encoded).to eq("/foo%20bar#fragment")
end
it "should leave fragment delimiter # after encoded query" do
encoded = Oregano::Util.uri_encode("/foo bar?baz+qux#fragment", { :allow_fragment => true })
expect(encoded).to eq("/foo%20bar?baz%2Bqux#fragment")
end
end
end
describe "when using platform :windows" do
before :each do
Oregano.features.stubs(:posix).returns false
Oregano.features.stubs(:microsoft_windows?).returns true
end
it "should url encode \\ as %5C, but not replace : as %3F" do
expect(Oregano::Util.uri_encode('c:\\foo\\bar\\baz')).to eq('c:%5Cfoo%5Cbar%5Cbaz')
end
%w[C:/ C:/foo/bar].each do |path|
it "should not replace / in #{path} with %2F" do
expect(Oregano::Util.uri_encode(path)).to eq(path)
end
end
end
end
describe ".uri_to_path" do
require 'uri'
# different UTF-8 widths
# 1-byte A
# 2-byte ۿ - http://www.fileformat.info/info/unicode/char/06ff/index.htm - 0xDB 0xBF / 219 191
# 3-byte ᚠ - http://www.fileformat.info/info/unicode/char/16A0/index.htm - 0xE1 0x9A 0xA0 / 225 154 160
# 4-byte 𠜎 - http://www.fileformat.info/info/unicode/char/2070E/index.htm - 0xF0 0xA0 0x9C 0x8E / 240 160 156 142
let (:mixed_utf8) { "A\u06FF\u16A0\u{2070E}" } # Aۿᚠ𠜎
it "should strip host component" do
expect(Oregano::Util.uri_to_path(URI.parse('http://foo/bar'))).to eq('/bar')
end
it "should accept oregano URLs" do
expect(Oregano::Util.uri_to_path(URI.parse('oregano:///modules/foo'))).to eq('/modules/foo')
end
it "should return unencoded path" do
expect(Oregano::Util.uri_to_path(URI.parse('http://foo/bar%20baz'))).to eq('/bar baz')
end
[
"http://foo/A%DB%BF%E1%9A%A0%F0%A0%9C%8E",
"http://foo/A%DB%BF%E1%9A%A0%F0%A0%9C%8E".force_encoding(Encoding::ASCII)
].each do |uri_string|
it "should return paths as UTF-8" do
path = Oregano::Util.uri_to_path(URI.parse(uri_string))
expect(path).to eq("/#{mixed_utf8}")
expect(path.encoding).to eq(Encoding::UTF_8)
end
end
it "should be nil-safe" do
expect(Oregano::Util.uri_to_path(nil)).to be_nil
end
describe "when using platform :posix",:if => Oregano.features.posix? do
it "should accept root" do
expect(Oregano::Util.uri_to_path(URI.parse('file:/'))).to eq('/')
end
it "should accept single slash" do
expect(Oregano::Util.uri_to_path(URI.parse('file:/foo/bar'))).to eq('/foo/bar')
end
it "should accept triple slashes" do
expect(Oregano::Util.uri_to_path(URI.parse('file:///foo/bar'))).to eq('/foo/bar')
end
end
describe "when using platform :windows", :if => Oregano.features.microsoft_windows? do
it "should accept root" do
expect(Oregano::Util.uri_to_path(URI.parse('file:/C:/'))).to eq('C:/')
end
it "should accept single slash" do
expect(Oregano::Util.uri_to_path(URI.parse('file:/C:/foo/bar'))).to eq('C:/foo/bar')
end
it "should accept triple slashes" do
expect(Oregano::Util.uri_to_path(URI.parse('file:///C:/foo/bar'))).to eq('C:/foo/bar')
end
it "should accept file scheme with double slashes as a UNC path" do
expect(Oregano::Util.uri_to_path(URI.parse('file://host/share/file'))).to eq('//host/share/file')
end
end
end
describe "safe_posix_fork" do
let(:pid) { 5501 }
before :each do
# Most of the things this method does are bad to do during specs. :/
Kernel.stubs(:fork).returns(pid).yields
$stdin.stubs(:reopen)
$stdout.stubs(:reopen)
$stderr.stubs(:reopen)
# ensure that we don't really close anything!
(0..256).each {|n| IO.stubs(:new) }
end
it "should close all open file descriptors except stdin/stdout/stderr when /proc/self/fd exists" do
# This is ugly, but I can't really think of a better way to do it without
# letting it actually close fds, which seems risky
fds = [".", "..","0","1","2","3","5","100","1000"]
fds.each do |fd|
if fd == '.' || fd == '..'
next
elsif ['0', '1', '2'].include? fd
IO.expects(:new).with(fd.to_i).never
else
IO.expects(:new).with(fd.to_i).returns mock('io', :close)
end
end
Dir.stubs(:foreach).with('/proc/self/fd').multiple_yields(*fds)
Oregano::Util.safe_posix_fork
end
it "should close all open file descriptors except stdin/stdout/stderr when /proc/self/fd doesn't exists" do
# This is ugly, but I can't really think of a better way to do it without
# letting it actually close fds, which seems risky
(0..2).each {|n| IO.expects(:new).with(n).never}
(3..256).each { |n| IO.expects(:new).with(n).returns mock('io', :close) }
Dir.stubs(:foreach).with('/proc/self/fd') { raise Errno::ENOENT }
Oregano::Util.safe_posix_fork
end
it "should fork a child process to execute the block" do
Kernel.expects(:fork).returns(pid).yields
Oregano::Util.safe_posix_fork do
message = "Fork this!"
end
end
it "should return the pid of the child process" do
expect(Oregano::Util.safe_posix_fork).to eq(pid)
end
end
describe "#which" do
let(:base) { File.expand_path('/bin') }
let(:path) { File.join(base, 'foo') }
before :each do
FileTest.stubs(:file?).returns false
FileTest.stubs(:file?).with(path).returns true
FileTest.stubs(:executable?).returns false
FileTest.stubs(:executable?).with(path).returns true
end
it "should accept absolute paths" do
expect(Oregano::Util.which(path)).to eq(path)
end
it "should return nil if no executable found" do
expect(Oregano::Util.which('doesnotexist')).to be_nil
end
it "should warn if the user's HOME is not set but their PATH contains a ~", :unless => gte_ruby_2_4 do
env_path = %w[~/bin /usr/bin /bin].join(File::PATH_SEPARATOR)
env = {:HOME => nil, :PATH => env_path}
env.merge!({:HOMEDRIVE => nil, :USERPROFILE => nil}) if Oregano.features.microsoft_windows?
Oregano::Util.withenv(env) do
Oregano::Util::Warnings.expects(:warnonce).once
Oregano::Util.which('foo')
end
end
it "should reject directories" do
expect(Oregano::Util.which(base)).to be_nil
end
it "should ignore ~user directories if the user doesn't exist" do
# Windows treats *any* user as a "user that doesn't exist", which means
# that this will work correctly across all our platforms, and should
# behave consistently. If they ever implement it correctly (eg: to do
# the lookup for real) it should just work transparently.
baduser = 'if_this_user_exists_I_will_eat_my_hat'
Oregano::Util.withenv("PATH" => "~#{baduser}#{File::PATH_SEPARATOR}#{base}") do
expect(Oregano::Util.which('foo')).to eq(path)
end
end
describe "on POSIX systems" do
before :each do
Oregano.features.stubs(:posix?).returns true
Oregano.features.stubs(:microsoft_windows?).returns false
end
it "should walk the search PATH returning the first executable" do
Oregano::Util.stubs(:get_env).with('PATH').returns(File.expand_path('/bin'))
Oregano::Util.stubs(:get_env).with('PATHEXT').returns(nil)
expect(Oregano::Util.which('foo')).to eq(path)
end
end
describe "on Windows systems" do
let(:path) { File.expand_path(File.join(base, 'foo.CMD')) }
before :each do
Oregano.features.stubs(:posix?).returns false
Oregano.features.stubs(:microsoft_windows?).returns true
end
describe "when a file extension is specified" do
it "should walk each directory in PATH ignoring PATHEXT" do
Oregano::Util.stubs(:get_env).with('PATH').returns(%w[/bar /bin].map{|dir| File.expand_path(dir)}.join(File::PATH_SEPARATOR))
Oregano::Util.stubs(:get_env).with('PATHEXT').returns('.FOOBAR')
FileTest.expects(:file?).with(File.join(File.expand_path('/bar'), 'foo.CMD')).returns false
expect(Oregano::Util.which('foo.CMD')).to eq(path)
end
end
describe "when a file extension is not specified" do
it "should walk each extension in PATHEXT until an executable is found" do
bar = File.expand_path('/bar')
Oregano::Util.stubs(:get_env).with('PATH').returns("#{bar}#{File::PATH_SEPARATOR}#{base}")
Oregano::Util.stubs(:get_env).with('PATHEXT').returns(".EXE#{File::PATH_SEPARATOR}.CMD")
exts = sequence('extensions')
FileTest.expects(:file?).in_sequence(exts).with(File.join(bar, 'foo.EXE')).returns false
FileTest.expects(:file?).in_sequence(exts).with(File.join(bar, 'foo.CMD')).returns false
FileTest.expects(:file?).in_sequence(exts).with(File.join(base, 'foo.EXE')).returns false
FileTest.expects(:file?).in_sequence(exts).with(path).returns true
expect(Oregano::Util.which('foo')).to eq(path)
end
it "should walk the default extension path if the environment variable is not defined" do
Oregano::Util.stubs(:get_env).with('PATH').returns(base)
Oregano::Util.stubs(:get_env).with('PATHEXT').returns(nil)
exts = sequence('extensions')
%w[.COM .EXE .BAT].each do |ext|
FileTest.expects(:file?).in_sequence(exts).with(File.join(base, "foo#{ext}")).returns false
end
FileTest.expects(:file?).in_sequence(exts).with(path).returns true
expect(Oregano::Util.which('foo')).to eq(path)
end
it "should fall back if no extension matches" do
Oregano::Util.stubs(:get_env).with('PATH').returns(base)
Oregano::Util.stubs(:get_env).with('PATHEXT').returns(".EXE")
FileTest.stubs(:file?).with(File.join(base, 'foo.EXE')).returns false
FileTest.stubs(:file?).with(File.join(base, 'foo')).returns true
FileTest.stubs(:executable?).with(File.join(base, 'foo')).returns true
expect(Oregano::Util.which('foo')).to eq(File.join(base, 'foo'))
end
end
end
end
describe "hash symbolizing functions" do
let (:myhash) { { "foo" => "bar", :baz => "bam" } }
let (:resulthash) { { :foo => "bar", :baz => "bam" } }
describe "#symbolizehash" do
it "should return a symbolized hash" do
newhash = Oregano::Util.symbolizehash(myhash)
expect(newhash).to eq(resulthash)
end
end
end
context "#replace_file" do
subject { Oregano::Util }
it { is_expected.to respond_to :replace_file }
let :target do
target = Tempfile.new("oregano-util-replace-file")
target.puts("hello, world")
target.flush # make sure content is on disk.
target.fsync rescue nil
target.close
target
end
it "should fail if no block is given" do
expect { subject.replace_file(target.path, 0600) }.to raise_error /block/
end
it "should replace a file when invoked" do
# Check that our file has the expected content.
expect(File.read(target.path)).to eq("hello, world\n")
# Replace the file.
subject.replace_file(target.path, 0600) do |fh|
fh.puts "I am the passenger..."
end
# ...and check the replacement was complete.
expect(File.read(target.path)).to eq("I am the passenger...\n")
end
# When running with the same user and group sid, which is the default,
# Windows collapses the owner and group modes into a single ACE, resulting
# in set(0600) => get(0660) and so forth. --daniel 2012-03-30
modes = [0555, 0660, 0770]
modes += [0600, 0700] unless Oregano.features.microsoft_windows?
modes.each do |mode|
it "should copy 0#{mode.to_s(8)} permissions from the target file by default" do
set_mode(mode, target.path)
expect(get_mode(target.path)).to eq(mode)
subject.replace_file(target.path, 0000) {|fh| fh.puts "bazam" }
expect(get_mode(target.path)).to eq(mode)
expect(File.read(target.path)).to eq("bazam\n")
end
end
it "should copy the permissions of the source file before yielding on Unix", :if => !Oregano.features.microsoft_windows? do
set_mode(0555, target.path)
inode = Oregano::FileSystem.stat(target.path).ino
yielded = false
subject.replace_file(target.path, 0600) do |fh|
expect(get_mode(fh.path)).to eq(0555)
yielded = true
end
expect(yielded).to be_truthy
expect(Oregano::FileSystem.stat(target.path).ino).not_to eq(inode)
expect(get_mode(target.path)).to eq(0555)
end
it "should use the default permissions if the source file doesn't exist" do
new_target = target.path + '.foo'
expect(Oregano::FileSystem.exist?(new_target)).to be_falsey
begin
subject.replace_file(new_target, 0555) {|fh| fh.puts "foo" }
expect(get_mode(new_target)).to eq(0555)
ensure
Oregano::FileSystem.unlink(new_target) if Oregano::FileSystem.exist?(new_target)
end
end
it "should not replace the file if an exception is thrown in the block" do
yielded = false
threw = false
begin
subject.replace_file(target.path, 0600) do |fh|
yielded = true
fh.puts "different content written, then..."
raise "...throw some random failure"
end
rescue Exception => e
if e.to_s =~ /some random failure/
threw = true
else
raise
end
end
expect(yielded).to be_truthy
expect(threw).to be_truthy
# ...and check the replacement was complete.
expect(File.read(target.path)).to eq("hello, world\n")
end
{:string => '664', :number => 0664, :symbolic => "ug=rw-,o=r--" }.each do |label,mode|
it "should support #{label} format permissions" do
new_target = target.path + "#{mode}.foo"
expect(Oregano::FileSystem.exist?(new_target)).to be_falsey
begin
subject.replace_file(new_target, mode) {|fh| fh.puts "this is an interesting content" }
expect(get_mode(new_target)).to eq(0664)
ensure
Oregano::FileSystem.unlink(new_target) if Oregano::FileSystem.exist?(new_target)
end
end
end
end
describe "#pretty_backtrace" do
it "should include lines that don't match the standard backtrace pattern" do
line = "non-standard line\n"
trace = caller[0..2] + [line] + caller[3..-1]
expect(Oregano::Util.pretty_backtrace(trace)).to match(/#{line}/)
end
it "should include function names" do
expect(Oregano::Util.pretty_backtrace).to match(/:in `\w+'/)
end
it "should work with Windows paths" do
expect(Oregano::Util.pretty_backtrace(["C:/work/oregano/c.rb:12:in `foo'\n"])).
to eq("C:/work/oregano/c.rb:12:in `foo'")
end
end
describe "#deterministic_rand" do
it "should not fiddle with future rand calls" do
Oregano::Util.deterministic_rand(123,20)
rand_one = rand()
Oregano::Util.deterministic_rand(123,20)
expect(rand()).not_to eql(rand_one)
end
if defined?(Random) == 'constant' && Random.class == Class
it "should not fiddle with the global seed" do
srand(1234)
Oregano::Util.deterministic_rand(123,20)
expect(srand()).to eql(1234)
end
# ruby below 1.9.2 variant
else
it "should set a new global seed" do
srand(1234)
Oregano::Util.deterministic_rand(123,20)
expect(srand()).not_to eql(1234)
end
end
end
end
| 37.948223 | 198 | 0.642179 |
4a81793241373547aa2c6d91dadd7cf107f7211e | 1,292 | # encoding: UTF-8
# This file is auto-generated from the current state of the database. Instead
# of editing this file, please use the migrations feature of Active Record to
# incrementally modify your database, and then regenerate this schema definition.
#
# Note that this schema.rb definition is the authoritative source for your
# database schema. If you need to create the application database on another
# system, you should be using db:schema:load, not running all the migrations
# from scratch. The latter is a flawed and unsustainable approach (the more migrations
# you'll amass, the slower it'll run and the greater likelihood for issues).
#
# It's strongly recommended that you check this file into your version control system.
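# For example, on a fresh database the schema can be loaded directly with
# something like `bin/rake db:schema:load` (or `bin/rails db:schema:load` on
# newer Rails versions) instead of replaying every migration.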
ActiveRecord::Schema.define(version: 20150317220442) do
# These are extensions that must be enabled in order to support this database
enable_extension "plpgsql"
create_table "courses", force: :cascade do |t|
t.string "name"
t.string "description"
t.datetime "created_at"
t.datetime "updated_at"
end
create_table "lessons", force: :cascade do |t|
t.string "title"
t.text "content"
t.datetime "created_at"
t.datetime "updated_at"
t.integer "lesson_order"
t.integer "course_id"
end
end
| 35.888889 | 86 | 0.743808 |
7938bc11b52667ce3a3465dd9bf79304758625f8 | 1,164 | require 'rails_helper'
RSpec.describe MicropostsController, type: :controller do
context "ポストチェック" do
before do
# @user = create(:testuser) # shorthand for FactoryBot.create(:testuser); build would not persist to the DB
@user = create(:testuser, name: "SoichiKamiya")
@micropost = create(:orange)
end
it "ログインしていない場合 create でリダイレクトされるか" do
post :create, params: { micropost: { content: "Lorem ipsum" } }
expect(response).to redirect_to login_path
end
it "ログインしていない場合 destroy リダイレクトされるか" do
delete :destroy, params: { id: @micropost.id }
expect(response).to redirect_to login_path
end
end
context "別ユーザーのポストを削除しようとするとリダイレクトされるか" do
before do
5.times { create(:user) }
5.times { |n| create(:microposts, user: User.find(n+1)) }
end
it "ログインしていない場合 create でリダイレクトされるか" do
# 当ページからログイン出来ない
# post login_url, params: { session: { email: "[email protected]", password: "Password1" } }
micropost = Micropost.second
expect { delete :destroy, params: { id: micropost.id } }.to change(Micropost, :count).by(0)
expect(response).to redirect_to login_path
end
end
end
| 31.459459 | 99 | 0.665808 |
5d449e8ebb123a0ea6c90d103faf3df70b676526 | 962 | require 'rspec'
require 'tap_impl/day02'
describe 'tap impl method' do
before(:each) do
@local = 'self'
end
it 'has to return self' do
expect(@local.tap_impl).to be @local
end
it 'has to pass self to a block' do
expect { |block| @local.tap_impl(&block) }.to yield_with_args(@local)
end
context 'access to internals of an object' do
class CustomClass
def initialize(reference)
@instance_var = reference
end
private
def private_method
@instance_var
end
end
before(:each) do
@ref = 'instance'
@cc = CustomClass.new(@ref)
end
it 'has to have access to instance variables' do
expect { |block| @cc.tap_impl { @instance_var.tap_impl(&block) } }.to yield_with_args(@ref)
end
it 'has to have access to private methods' do
expect { |block| @cc.tap_impl { private_method.tap_impl(&block) } }.to yield_with_args(@ref)
end
end
end
| 22.372093 | 98 | 0.636175 |
21c218d8949b42d877fe2f68efdf09a173f985c4 | 83 | ActiveSupport::Inflector.inflections(:en) do |inflect|
inflect.acronym 'JWT'
end
| 20.75 | 54 | 0.771084 |
f87eb7e45dbeeec5819e1e9aed24f84810fc31de | 357 |
require 'commander/import'
require 'Confetti'
module Confetti
module Commands
class LsProj
def LsProj.command(c)
c.syntax = 'tt lsproj [options]'
c.summary = 'List activities'
c.description = c.summary
c.example 'List all projects', 'tt lsproj'
c.action LsProj
end
def initialize(args, options)
end
end
end # Commands
end # Confetti
| 13.730769 | 44 | 0.717087 |
1dd201ea0fa6eb898cab63a1e6492cb2930791b6 | 5,375 | =begin
PureCloud Platform API
With the PureCloud Platform API, you can control all aspects of your PureCloud environment. With the APIs you can access the system configuration, manage conversations and more.
OpenAPI spec version: v2
Contact: [email protected]
Generated by: https://github.com/swagger-api/swagger-codegen.git
License: ININ
http://www.inin.com
Terms of Service: https://developer.mypurecloud.com/tos
=end
require 'date'
module PureCloud
class ObservationDataContainer
# A mapping from dimension to value
attr_accessor :group
attr_accessor :data
# Attribute mapping from ruby-style variable name to JSON key.
def self.attribute_map
{
:'group' => :'group',
:'data' => :'data'
}
end
# Attribute type mapping.
def self.swagger_types
{
:'group' => :'Hash<String, String>',
:'data' => :'Array<AggregateMetricData>'
}
end
# Initializes the object
# @param [Hash] attributes Model attributes in the form of hash
def initialize(attributes = {})
return unless attributes.is_a?(Hash)
# convert string to symbol for hash key
attributes = attributes.each_with_object({}){|(k,v), h| h[k.to_sym] = v}
if attributes.has_key?(:'group')
if (value = attributes[:'group']).is_a?(Array)
self.group = value
end
end
if attributes.has_key?(:'data')
if (value = attributes[:'data']).is_a?(Array)
self.data = value
end
end
end
# Show invalid properties with the reasons. Usually used together with valid?
# @return Array for valid properties with the reasons
def list_invalid_properties
invalid_properties = Array.new
return invalid_properties
end
# Check to see if all the properties in the model are valid
# @return true if the model is valid
def valid?
true
end
# Checks equality by comparing each attribute.
# @param [Object] Object to be compared
def ==(o)
return true if self.equal?(o)
self.class == o.class &&
group == o.group &&
data == o.data
end
# @see the `==` method
# @param [Object] Object to be compared
def eql?(o)
self == o
end
# Calculates hash code according to all attributes.
# @return [Fixnum] Hash code
def hash
[group, data].hash
end
# build the object from hash
def build_from_hash(attributes)
return nil unless attributes.is_a?(Hash)
self.class.swagger_types.each_pair do |key, type|
if type =~ /^Array<(.*)>/i
if attributes[self.class.attribute_map[key]].is_a?(Array)
self.send("#{key}=", attributes[self.class.attribute_map[key]].map{ |v| _deserialize($1, v) } )
else
#TODO show warning in debug mode
end
elsif !attributes[self.class.attribute_map[key]].nil?
self.send("#{key}=", _deserialize(type, attributes[self.class.attribute_map[key]]))
else
# data not found in attributes(hash), not an issue as the data can be optional
end
end
self
end
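# Deserializes a single value into the given swagger type
# @param type [String] the declared swagger type
# @param value the raw value to deserialize
# @return [Object] the deserialized value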
def _deserialize(type, value)
case type.to_sym
when :DateTime
DateTime.parse(value)
when :Date
Date.parse(value)
when :String
value.to_s
when :Integer
value.to_i
when :Float
value.to_f
when :BOOLEAN
if value.to_s =~ /^(true|t|yes|y|1)$/i
true
else
false
end
when :Object
# generic object (usually a Hash), return directly
value
when /\AArray<(?<inner_type>.+)>\z/
inner_type = Regexp.last_match[:inner_type]
value.map { |v| _deserialize(inner_type, v) }
when /\AHash<(?<k_type>.+), (?<v_type>.+)>\z/
k_type = Regexp.last_match[:k_type]
v_type = Regexp.last_match[:v_type]
{}.tap do |hash|
value.each do |k, v|
hash[_deserialize(k_type, k)] = _deserialize(v_type, v)
end
end
else # model
_model = Object.const_get("PureCloud").const_get(type).new
_model.build_from_hash(value)
end
end
def to_s
to_hash.to_s
end
# to_body is an alias to to_hash (backward compatibility)
def to_body
to_hash
end
# return the object in the form of hash
def to_hash
hash = {}
self.class.attribute_map.each_pair do |attr, param|
value = self.send(attr)
next if value.nil?
hash[param] = _to_hash(value)
end
hash
end
# Method to output non-array value in the form of hash
# For object, use to_hash. Otherwise, just return the value
def _to_hash(value)
if value.is_a?(Array)
value.compact.map{ |v| _to_hash(v) }
elsif value.is_a?(Hash)
{}.tap do |hash|
value.each { |k, v| hash[k] = _to_hash(v) }
end
elsif value.respond_to? :to_hash
value.to_hash
else
value
end
end
end
end
| 23.168103 | 177 | 0.572837 |
ffcef3411ce216e5ba2ee11b1b8e658c58ab692f | 581 | require File.dirname(__FILE__) + '/../../../spec_helper'
require 'stringio'
require 'zlib'
describe "GzipReader#read" do
before :each do
@data = '12345abcde'
@zip = "\037\213\b\000,\334\321G\000\00334261MLJNI\005\000\235\005\000$\n\000\000\000"
@io = StringIO.new @zip
end
it "reads the contents of a gzip file" do
gz = Zlib::GzipReader.new @io
gz.read.should == @data
end
it "reads the contents up to a certain size" do
gz = Zlib::GzipReader.new @io
gz.read(5).should == @data[0...5]
gz.read(5).should == @data[5...10]
end
end
| 20.034483 | 90 | 0.629948 |
26ca81398bcf6b74db63c9363cd879738cb42603 | 138 | class CreateItems < ActiveRecord::Migration
def change
create_table :items do |t|
t.timestamps null: false
end
end
end
| 15.333333 | 43 | 0.688406 |
acf32f8fce973c07db60f18aabc26c5e502d1505 | 1,163 | # frozen_string_literal: true
# ExploreController exposes a search interface for non-logged in (anonymous)
# users. It allows these users to search for public repositories.
class ExploreController < ActionController::Base
protect_from_forgery with: :exception
before_action :feature_enabled, only: [:index]
include Headers
include Pundit
layout "authentication"
# It's both the main page and the page where search results are shown.
def index
@current = search_params
if @current
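# Queries may include a colon-separated suffix (e.g. "name:tag"); only the
# part before the first colon is used for the repository search.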
repository = @current.split(":").first
repositories = policy_scope(Repository).includes(:stars).search(repository)
@repositories = API::Entities::Repositories.represent(repositories, type: :internal).to_json
else
@repositories = []
end
end
protected
# Returns a string with the search query, or nil if none was given.
def search_params
s = params.permit(explore: [:search])
return unless s[:explore]
s[:explore][:search]
end
# Redirect to the root page if this feature is not enabled.
def feature_enabled
redirect_to root_path unless APP_CONFIG.enabled?("anonymous_browsing")
end
end
| 27.690476 | 98 | 0.728289 |
288bae77735a37be3401b4a9bee612e47c2a84e4 | 2,362 | class User < ApplicationRecord
attr_accessor :remember_token, :activation_token, :reset_token
before_save :downcase_email
before_create :create_activation_digest
validates :name, presence: true, length: { maximum: 50 }
VALID_EMAIL_REGEX = /\A[\w+\-.]+@[a-z\d\-]+(\.[a-z\d\-]+)*\.[a-z]+\z/i
validates :email, presence: true, length: { maximum: 255 }, format: { with: VALID_EMAIL_REGEX }, uniqueness: { case_sensitive: false }
has_secure_password
validates :password, presence: true, length: { minimum: 6 }, allow_nil: true
def User.digest(string)
cost = ActiveModel::SecurePassword.min_cost ? BCrypt::Engine::MIN_COST :
BCrypt::Engine.cost
BCrypt::Password.create(string, cost: cost)
end
def User.new_token
SecureRandom.urlsafe_base64
end
def remember
self.remember_token = User.new_token
update_attribute(:remember_digest, User.digest(remember_token))
end
def forget
update_attribute(:remember_digest, nil)
end
# Returns true if the given token matches the digest for the given attribute
def authenticated?(attribute, token)
digest = send("#{attribute}_digest")
return false if digest.nil?
BCrypt::Password.new(digest).is_password?(token)
end
# Activates an account
def activate
update_attribute(:activated, true)
update_attribute(:activated_at, Time.zone.now)
end
# Sends the activation email
def send_activation_email
UserMailer.account_activation(self).deliver_now
end
# Sets the password reset attributes
def create_reset_digest
self.reset_token = User.new_token
update_attribute(:reset_digest, User.digest(reset_token))
update_attribute(:reset_sent_at, Time.zone.now)
end
# Sends the password reset email
def send_password_reset_email
UserMailer.password_reset(self).deliver_now
end
def password_reset_expired?
reset_sent_at < 2.hours.ago
end
private
def downcase_email
self.email = email.downcase
end
# Creates and assigns the activation token and digest
def create_activation_digest
self.activation_token = User.new_token
self.activation_digest = User.digest(activation_token)
end
end
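# Illustrative usage (hypothetical values, assuming the standard *_digest columns):
#   user = User.create!(name: "Example User", email: "user@example.com",
#                       password: "foobar", password_confirmation: "foobar")
#   user.remember
#   user.authenticated?(:remember, user.remember_token) # => true
#   user.forget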
| 28.119048 | 138 | 0.6884 |
ac4ccdfb723c09c034b42f81c1ccef6952784699 | 280 | class RenameBuildboxService < ActiveRecord::Migration[4.2]
def up
execute "UPDATE services SET type = 'BuildkiteService' WHERE type = 'BuildboxService';"
end
def down
execute "UPDATE services SET type = 'BuildboxService' WHERE type = 'BuildkiteService';"
end
end
| 28 | 91 | 0.735714 |
26a80da89ebc6ed90042391bc9f8077a512cfedf | 154 | module Resque
module Failure
class Rollbar < Base
def save
::Rollbar.report_exception(exception, payload)
end
end
end
end
| 15.4 | 54 | 0.642857 |
01712089ee26fe4bcff373c689e8aa0982ab3d5e | 135 | Schubert.template "add_user" do |r,opt|
up = "useradd -m -U #{opt[:name]}"
down "userdel -r #{opt[:name]}"
r.shell up, down
end
| 19.285714 | 39 | 0.607407 |
5d0144ed4f9224b222b7163dd45239bc6c15287f | 877 | module Feeble::Language::Ruby
include Feeble::Runtime
RSpec.describe Lambda do
subject(:creator) { described_class.new }
it "returns an invokable" do
new_lambda = creator.invoke([], [])
expect(new_lambda.is_a?(Invokable)).to eq(true)
end
it "returns an invokable with arity related to the passed params" do
zero_arity = creator.invoke([1])
expect(zero_arity.invoke).to eq 1
one_arity = creator.invoke([Symbol.new("a")], [2])
expect(one_arity.invoke(1)).to eq 2
two_arity = creator.invoke([
Symbol.new("a"), Symbol.new("b")], [3])
expect(two_arity.invoke(1, 2)).to eq 3
var_args = creator.invoke([
Symbol.new("*all")], [4])
expect(var_args.invoke(1, 2, 3)).to eq 4
expect(var_args.invoke(1, 2, 3, 4)).to eq 4
end
it "creates a lambda with properties"
end
end
| 27.40625 | 72 | 0.625998 |
6a9b509c5ed0bbdbd0358ab0d1cfc08e158ca116 | 3,377 | Rails.application.configure do
# Settings specified here will take precedence over those in config/application.rb.
# Code is not reloaded between requests.
config.cache_classes = true
# Eager load code on boot. This eager loads most of Rails and
# your application in memory, allowing both threaded web servers
# and those relying on copy on write to perform better.
# Rake tasks automatically ignore this option for performance.
config.eager_load = true
# Full error reports are disabled and caching is turned on.
config.consider_all_requests_local = false
config.action_controller.perform_caching = true
# Enable Rack::Cache to put a simple HTTP cache in front of your application
# Add `rack-cache` to your Gemfile before enabling this.
# For large-scale production use, consider using a caching reverse proxy like
# NGINX, varnish or squid.
config.action_dispatch.rack_cache = true
# Serve static files from the `/public` folder. Set this to false if
# Apache or NGINX already handles static assets in front of the app.
config.serve_static_files = true
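# A common alternative in generated configs is to toggle this per deploy, e.g.
# config.serve_static_files = ENV['RAILS_SERVE_STATIC_FILES'].present?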
# Compress JavaScripts and CSS.
config.assets.js_compressor = :uglifier
# config.assets.css_compressor = :sass
# Do not fallback to assets pipeline if a precompiled asset is missed.
config.assets.compile = false
# Asset digests allow you to set far-future HTTP expiration dates on all assets,
# yet still be able to expire them through the digest params.
config.assets.digest = true
# `config.assets.precompile` and `config.assets.version` have moved to config/initializers/assets.rb
# Specifies the header that your server uses for sending files.
# config.action_dispatch.x_sendfile_header = 'X-Sendfile' # for Apache
# config.action_dispatch.x_sendfile_header = 'X-Accel-Redirect' # for NGINX
# Force all access to the app over SSL, use Strict-Transport-Security, and use secure cookies.
# config.force_ssl = true
# Use the lowest log level to ensure availability of diagnostic information
# when problems arise.
config.log_level = :debug
# Prepend all log lines with the following tags.
# config.log_tags = [ :subdomain, :uuid ]
# Use a different logger for distributed setups.
# config.logger = ActiveSupport::TaggedLogging.new(SyslogLogger.new)
# Use a different cache store in production.
# config.cache_store = :mem_cache_store
config.action_controller.perform_caching = true
config.cache_store = :memory_store, { size: 64.megabytes }
# Enable serving of images, stylesheets, and JavaScripts from an asset server.
# config.action_controller.asset_host = 'http://assets.example.com'
# Ignore bad email addresses and do not raise email delivery errors.
# Set this to true and configure the email server for immediate delivery to raise delivery errors.
# config.action_mailer.raise_delivery_errors = false
# Enable locale fallbacks for I18n (makes lookups for any locale fall back to
# the I18n.default_locale when a translation cannot be found).
config.i18n.fallbacks = true
# Send deprecation notices to registered listeners.
config.active_support.deprecation = :notify
# Use default logging formatter so that PID and timestamp are not suppressed.
config.log_formatter = ::Logger::Formatter.new
# Do not dump schema after migrations.
config.active_record.dump_schema_after_migration = false
end
| 41.182927 | 102 | 0.764288 |
6100a331a882c9531612dfe9ac97b3130536df97 | 380 | require_relative 'utils/intcode'
require 'set'
input = ARGF.read.split(?,).map(&:to_i)
def run(mem, origin_white: false)
  pos = [0, 0]
  dir = [-1, 0] # start facing "up"
  # Track the color painted on each panel; unpainted panels read as black (0).
  panels = {}
  panels[pos] = 1 if origin_white
  visited = Set.new
  b = Intcode.new(mem)
  until b.halted?
    b.continue(input: panels.fetch(pos, 0))
    color, turn = b.output.shift(2)
    break if color.nil?
    panels[pos] = color
    visited << pos
    # turn == 0 rotates the robot left 90 degrees, turn == 1 rotates it right.
    dir = turn.zero? ? [-dir[1], dir[0]] : [dir[1], -dir[0]]
    pos = [pos[0] + dir[0], pos[1] + dir[1]]
  end
  visited.size
end
puts run(input)
| 15.2 | 39 | 0.631579 |
62f79afcb1a8a9870350db9f005a5567f56b8714 | 3,342 | # Copyright 2012, Dell
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
class NagiosService < ServiceObject
def create_proposal(name)
@logger.debug("Nagios create_proposal: entering")
base = super(name)
enab_raid = Barclamp.find_by_name("raid") != nil
    enab_ipmi = Barclamp.find_by_name("ipmi") != nil
    ## all good and fine, but we're not officially supporting HW monitoring for now..
enab_raid = enab_ipmi = false
hash = base.current_config.config_hash
hash["nagios"]["monitor_raid"] = enab_raid
hash["nagios"]["monitor_ipmi"] = enab_ipmi
base.current_config.config_hash = hash
    @logger.debug("Nagios create_proposal: exiting. RAID: #{enab_raid}, IPMI: #{enab_ipmi}")
base
end
def transition(inst, name, state)
@logger.debug("Nagios transition: make sure that network role is on all nodes: #{name} for #{state}")
#
# If we are discovering the node, make sure that we add the nagios client or server to the node
#
if state == "discovered"
@logger.debug("Nagios transition: discovered state for #{name} for #{state}")
prop = @barclamp.get_proposal(inst)
return [400, "Nagios Proposal is not active"] unless prop.active?
nodes = prop.active_config.get_nodes_by_role("nagios-server")
result = true
if nodes.empty?
@logger.debug("Nagios transition: make sure that nagios-server role is on first: #{name} for #{state}")
result = add_role_to_instance_and_node(name, inst, "nagios-server")
nodes = [ Node.find_by_name(name) ]
else
node = Node.find_by_name(name)
unless nodes.include? node
@logger.debug("Nagios transition: make sure that nagios-client role is on all nodes but first: #{name} for #{state}")
result = add_role_to_instance_and_node(name, inst, "nagios-client")
end
end
# Set up the client url
if result
# Get the server IP address
node = nodes.first
server_ip = node.address("public").addr rescue node.address.addr
unless server_ip.nil?
node = Node.find_by_name(name)
chash = prop.active_config.get_node_config_hash(node)
chash["crowbar"] = {} if chash["crowbar"].nil?
chash["crowbar"]["links"] = {} if chash["crowbar"]["links"].nil?
chash["crowbar"]["links"]["Nagios"] = "http://#{server_ip}/nagios3/cgi-bin/extinfo.cgi?type=1&host=#{node.name}"
prop.active_config.set_node_config_hash(node, chash)
end
end
@logger.debug("Nagios transition: leaving from discovered state for #{name} for #{state}")
a = [200, "" ] if result
a = [400, "Failed to add role to node"] unless result
return a
end
@logger.debug("Nagios transition: leaving for #{name} for #{state}")
[200, ""]
end
end
| 35.935484 | 127 | 0.666667 |
4a915a89b541496d0876bb9aa9563d1b69b681ee | 239 | class RenameGoverningBodiesPositionsToHubsPositions < ActiveRecord::Migration
def up
rename_table :governing_bodies_positions, :hubs_positions
end
def down
rename_table :hubs_positions, :governing_bodies_positions
end
end
| 23.9 | 77 | 0.820084 |
39ca47468c623730c0661193551513ff2e8aeef8 | 51 | module ProgrammingJokesGem
VERSION = "0.1.0"
end
| 12.75 | 26 | 0.745098 |
ff4e89638b69c1fefcea9189a17656b4cf9ca84e | 2,066 | require 'json'
package = JSON.parse(File.read(File.join(__dir__, 'package.json')))
appPackage = JSON.parse(File.read(File.join('..', 'app', 'package.json')))
coreVersionDetected = appPackage['version']
coreVersionRequired = package['peerDependencies'][appPackage['name']]
firebase_sdk_version = appPackage['sdkVersions']['ios']['firebase']
if coreVersionDetected != coreVersionRequired
Pod::UI.warn "NPM package '#{package['name']}' depends on '#{appPackage['name']}' v#{coreVersionRequired} but found v#{coreVersionDetected}, this might cause build issues or runtime crashes."
end
Pod::Spec.new do |s|
s.name = "RNFBAdMob"
s.version = package["version"]
s.description = package["description"]
s.summary = <<-DESC
A well tested feature rich Firebase implementation for React Native, supporting iOS & Android.
DESC
s.homepage = "http://invertase.io/oss/react-native-firebase"
s.license = package['license']
s.authors = "Invertase Limited"
s.source = { :git => "https://github.com/invertase/react-native-firebase.git", :tag => "v#{s.version}" }
s.social_media_url = 'http://twitter.com/invertaseio'
s.ios.deployment_target = "10.0"
s.source_files = 'ios/**/*.{h,m}'
# React Native dependencies
s.dependency 'React-Core'
s.dependency 'RNFBApp'
# Other dependencies
s.dependency 'PersonalizedAdConsent', '~> 1.0.4'
if defined?($FirebaseSDKVersion)
Pod::UI.puts "#{s.name}: Using user specified Firebase SDK version '#{$FirebaseSDKVersion}'"
firebase_sdk_version = $FirebaseSDKVersion
end
# Firebase dependencies
s.dependency 'Firebase/AdMob', firebase_sdk_version
if defined?($RNFirebaseAsStaticFramework)
Pod::UI.puts "#{s.name}: Using overridden static_framework value of '#{$RNFirebaseAsStaticFramework}'"
s.static_framework = $RNFirebaseAsStaticFramework
else
s.static_framework = false
end
end
| 41.32 | 193 | 0.657793 |
1c099ca7b31710f02c93f8bb6a6521d02592f862 | 96 | module Beehive
module Taxonomies
class ApplicationJob < ActiveJob::Base
end
end
end
| 13.714286 | 42 | 0.729167 |
380e61ab49d1c0207b663fd1ee925c82d8601c0c | 1,174 | module Embulk
class JubatusClassifierOutputPlugin < OutputPlugin
require 'jubatus/classifier/client'
Plugin.register_output('jubatus_classifier', self)
def self.transaction(config, schema, processor_count, &control)
task = {
'host' => config.param('host', :string, :default => 'localhost'),
'port' => config.param('port', :integer, :default => 9199),
'name' => config.param('name', :string, :default => 'test'),
}
      puts "Jubatus classifier output started."
yield(task)
puts "Jubatus classifier output finished."
return {}
end
def initialize(task, schema, index)
super
@juba = ::Jubatus::Classifier::Client::Classifier.new(task['host'], task['port'], task['name'])
end
def close
end
def add(page)
train_data = Array.new
page.each do |record|
key = record.shift
hash = Hash[record]
train_data.push([key, ::Jubatus::Common::Datum.new(hash)])
end
      train_data.shuffle!
@juba.train(train_data)
end
def finish
end
def abort
end
def commit
{}
end
end
end
| 23.019608 | 101 | 0.5954 |
e96d3cfd78c5a0308e76cf330b73d09f49105f15 | 880 | class UserDevice < ActiveRecord::Base
class RegisterService
delegate :errors, to: :user_device
def initialize(params, scoped)
@scoped = scoped
@params = params
end
def save
return true if user_device_exists?
if user_device.valid?
user_device.transaction do
wipe_existing_user_device!
user_device.save!
end
true
end
end
def user_device
@user_device ||= @scoped.new(
uuid: @params[:uuid],
platform: @params[:platform]
)
end
private
def user_device_exists?
@scoped.where(
uuid: @params[:uuid],
platform: @params[:platform]
).exists?
end
def wipe_existing_user_device!
UserDevice.where(
uuid: user_device.uuid,
platform: user_device.platform
).destroy_all
end
end
end
| 19.555556 | 40 | 0.6 |
26ff731c5e39792b1ac0b8cd9e57b4b3b3291bbd | 1,290 | module Toker
class SessionsController < ApplicationController
before_action :toke!, only: :destroy
def create
@user = authenticate_with_http_basic do |email, password|
user = User.find_by email: email
if user && user.authenticate(password)
user.token.destroy if user.token
user.token = Token.create expires_at: 1.year.from_now
user.token.generate_key!
user.token.save
user
end
end
if @user
response.headers['Authorization'] = "Token #{@user.token.key}"
render json: @user, status: :created
else
render json: { Unauthorized: 'Invalid email or password' }, status: :unauthorized
end
end
def update
token = authenticate_with_http_token do |jwt, options|
Token.decode(jwt)[0]
end
user = token.user if token
if user
response.headers['Authorization'] = "Token #{token.key}"
render json: user
else
render json: { Unauthorized: 'Invalid session' }, status: :unauthorized
end
end
def destroy
@user.token.destroy
head :no_content
end
private
def payload
{
user_id: @user.id,
exp: 1.year.from_now.to_i
}
end
end
end
| 24.807692 | 89 | 0.602326 |
33d5841bbe0dd5009bb29b2cf81eb69891e8eb3d | 2,546 | #!/usr/bin/ruby -w
# -*- coding: utf-8 -*-
###############################################################################
#
# Example of how to use the WriteExcel merge_cells() workbook
# method with complex formatting and rotation.
#
#
# reverse('©'), September 2002, John McNamara, [email protected]
#
# original written in Perl by John McNamara
# converted to Ruby by Hideo Nakamura, [email protected]
#
require 'writeexcel'
# Create a new workbook and add a worksheet
workbook = WriteExcel.new('merge5.xls')
worksheet = workbook.add_worksheet
# Increase the cell size of the merged cells to highlight the formatting.
(3..8).each { |row| worksheet.set_row(row, 36) }
[1, 3, 5].each { |n| worksheet.set_column(n, n, 15) }
###############################################################################
#
# Rotation 1, letters run from top to bottom
#
format1 = workbook.add_format(
:border => 6,
:bold => 1,
:color => 'red',
:valign => 'vcentre',
:align => 'centre',
:rotation => 270
)
worksheet.merge_range('B4:B9', 'Rotation 270', format1)
###############################################################################
#
# Rotation 2, 90° anticlockwise
#
format2 = workbook.add_format(
:border => 6,
:bold => 1,
:color => 'red',
:valign => 'vcentre',
:align => 'centre',
:rotation => 90
)
worksheet.merge_range('D4:D9', 'Rotation 90°', format2)
###############################################################################
#
# Rotation 3, 90° clockwise
#
format3 = workbook.add_format(
:border => 6,
:bold => 1,
:color => 'red',
:valign => 'vcentre',
:align => 'centre',
:rotation => -90
)
worksheet.merge_range('F4:F9', 'Rotation -90°', format3)
workbook.close
| 31.825 | 79 | 0.364101 |
030c54dec5250c211c50272321c275c5d4f5329b | 595 | cask "only-switch" do
version "2.0"
sha256 "a27d7f338769f092164feb5adaaeefda3b4ef364c108d039af6acff07ee9af1a"
url "https://github.com/jacklandrin/OnlySwitch/releases/download/release_#{version}/OnlySwitch.dmg"
name "OnlySwitch"
desc "System and utility switches"
homepage "https://github.com/jacklandrin/OnlySwitch"
depends_on macos: ">= :monterey"
app "Only Switch.app"
zap trash: [
"~/Library/Application Support/OnlySwitch",
"~/Library/Caches/jacklandrin.OnlySwitch",
"~/Library/OnlySwitch",
"~/Library/Preferences/jacklandrin.OnlySwitch.plist",
]
end
| 28.333333 | 101 | 0.742857 |
ab9aeb9d31d47b3388a107e35eccb015cf8b92f8 | 678 | cask 'mobile-mouse-server' do
version '3.3.4'
sha256 'd113c105a5ae3d20b06cd6e47354f890b06947373c726fb0f4970f3f48a4b047'
url "http://mobilemouse.com/downloads/OS_X_Server_#{version.dots_to_underscores}.dmg"
name 'Mobile Mouse Server'
homepage 'http://mobilemouse.com/'
app 'Mobile Mouse Server.app'
uninstall quit: 'com.rpatechnology.mobilemouse'
  zap delete: [
                '~/Library/Caches/com.crashlytics.data/com.rpatechnology.mobilemouse',
                '~/Library/Caches/com.rpatechnology.mobilemouse',
                '~/Library/Preferences/com.rpatechnology.mobilemouse.plist',
              ]
end
| 35.684211 | 102 | 0.721239 |
211fa2ff091bc9a8b22eb323d1a1cb64a272e77c | 2,012 | # The MIT License (MIT)
# Copyright (c) 2018 Mike DeAngelo Looker Data Sciences, Inc.
# Permission is hereby granted, free of charge, to any person obtaining a copy of
# this software and associated documentation files (the "Software"), to deal in
# the Software without restriction, including without limitation the rights to
# use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
# the Software, and to permit persons to whom the Software is furnished to do so,
# subject to the following conditions:
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
# FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
# COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
# IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
# frozen_string_literal: true
require_relative '../../command'
require_relative '../../modules/user'
require_relative '../../modules/filehelper'
module Gzr
module Commands
class User
class Cat < Gzr::Command
include Gzr::User
include Gzr::FileHelper
def initialize(user_id,options)
super()
@user_id = user_id
@options = options
end
def execute(input: $stdin, output: $stdout)
say_warning("options: #{@options.inspect}") if @options[:debug]
with_session do
data = query_user(@user_id,@options[:fields])
write_file(@options[:dir] ? "User_#{data.id}_#{data.display_name}.json" : nil, @options[:dir],nil, output) do |f|
f.puts JSON.pretty_generate(data.to_attrs)
end
end
end
end
end
end
end
| 37.962264 | 125 | 0.697813 |
e204a205f6665b6c24d3e7c16a6f75862f7ab3bc | 1,291 | module EphJpl
module Const
USAGE = <<-EOS
[USAGE] EphJpl.new(BIN_PATH, TARGET, CENTER, JD)
[ASTRO NO] (TARGET: 1 - 15, CENTER: 0 - 13)
1: Mercury, 2: Venus, 3: Earth, 4: Mars, 5: Jupiter,
6: Saturn, 7: Uranus, 8: Neptune, 9: Pluto, 10: Moon,
11: Sun, 12: Solar system Barycenter, 13: Earth-Moon barycenter,
14: Earth Nutations, 15: Lunar mantle Librations
* If TARGET = 14 or 15, CENTER = 0
* TARGET != CENTER
* 2287184.5 <= JD <= 2688976.5
EOS
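    # Illustrative call matching the USAGE text above (a sketch; the binary
    # path is an assumption): EphJpl.new("/path/to/de430.bin", 11, 3, 2451545.0)
    # targets the Sun (11) relative to the Earth (3) at JD 2451545.0.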
MSG_ERR_1 = "Binary file path is invalid!"
MSG_ERR_2 = "Binary file is not found!"
MSG_ERR_3 = "TARGET is invalid!"
MSG_ERR_4 = "CENTER is invalid!"
MSG_ERR_5 = "TARGET == CENTER ?"
MSG_ERR_6 = "TARGET or CENTER is invalid!"
MSG_ERR_7 = "JD is invalid!"
MSG_ERR_8 = "KM flag is invalid!"
MSG_ERR_9 = "This library is supporting only DE430!"
EPOCH_PERIOD = [2287184.5, 2688976.5]
KSIZE = 2036
RECL = 4
ASTRS = [
"Mercury", "Venus", "Earth", "Mars", "Jupiter", "Saturn", "Uranus",
"Neptune", "Pluto", "Moon", "Sun", "Solar system Barycenter",
"Earth-Moon barycenter", "Earth Nutations", "Lunar mantle Librations"
]
KIND = 2
BARY = true
KM = false
end
end
| 34.891892 | 75 | 0.584818 |
6a0726f1f0ca010ed8fe9f8111b2740e56f76cc9 | 6,443 | # frozen_string_literal: true
module EE
module API
module Members
extend ActiveSupport::Concern
prepended do
params do
requires :id, type: String, desc: 'The ID of a group'
end
resource :groups, requirements: ::API::API::NAMESPACE_OR_PROJECT_REQUIREMENTS do
desc 'Overrides the access level of an LDAP group member.' do
success Entities::Member
end
params do
requires :user_id, type: Integer, desc: 'The user ID of the member'
end
post ":id/members/:user_id/override" do
member = find_member(params)
result = ::Members::UpdateService
.new(current_user, { override: true })
.execute(member, permission: :override)
updated_member = result[:member]
if result[:status] == :success
present_member(updated_member)
else
render_validation_error!(updated_member)
end
end
desc 'Remove an LDAP group member access level override.' do
success Entities::Member
end
params do
requires :user_id, type: Integer, desc: 'The user ID of the member'
end
delete ":id/members/:user_id/override" do
member = find_member(params)
result = ::Members::UpdateService
.new(current_user, { override: false })
.execute(member, permission: :override)
updated_member = result[:member]
if result[:status] == :success
present_member(updated_member)
else
render_validation_error!(updated_member)
end
end
desc 'Approves a pending member'
params do
requires :member_id, type: Integer, desc: 'The ID of the member requiring approval'
end
put ':id/members/:member_id/approve' do
group = find_group!(params[:id])
member = ::Member.find_by_id(params[:member_id])
not_found! unless member
bad_request! unless group.root?
bad_request! unless can?(current_user, :admin_group_member, group)
result = ::Members::ActivateService
.new(group, member: member, current_user: current_user)
.execute
if result[:status] == :success
no_content!
else
bad_request!(result[:message])
end
end
desc 'Approves all pending members'
post ':id/members/approve_all' do
group = find_group!(params[:id])
bad_request! unless group.root?
bad_request! unless can?(current_user, :admin_group_member, group)
result = ::Members::ActivateService
.new(group, activate_all: true, current_user: current_user)
.execute
if result[:status] == :success
no_content!
else
bad_request!(result[:message])
end
end
desc 'Lists all pending members for a group including invited users'
params do
use :pagination
end
get ":id/pending_members" do
group = find_group!(params[:id])
bad_request! unless group.root?
bad_request! unless can?(current_user, :admin_group_member, group)
members = ::Member.distinct_awaiting_or_invited_for_group(group)
present paginate(members), with: ::API::Entities::PendingMember
end
desc 'Gets a list of billable users of root group.' do
success Entities::Member
end
params do
use :pagination
optional :search, type: String, desc: 'The exact name of the subscribed member'
optional :sort, type: String, desc: 'The sorting option', values: Helpers::MembersHelpers.member_sort_options
end
get ":id/billable_members" do
group = find_group!(params[:id])
bad_request!(nil) if group.subgroup?
bad_request!(nil) unless ::Ability.allowed?(current_user, :admin_group_member, group)
sorting = params[:sort] || 'id_asc'
result = BilledUsersFinder.new(group,
search_term: params[:search],
order_by: sorting).execute
present paginate(result[:users]),
with: ::EE::API::Entities::BillableMember,
current_user: current_user,
group_member_user_ids: result[:group_member_user_ids],
project_member_user_ids: result[:project_member_user_ids],
shared_group_user_ids: result[:shared_group_user_ids],
shared_project_user_ids: result[:shared_project_user_ids]
end
desc 'Get the memberships of a billable user of a root group.' do
success ::EE::API::Entities::BillableMembership
end
params do
requires :user_id, type: Integer, desc: 'The user ID of the member'
use :pagination
end
get ":id/billable_members/:user_id/memberships" do
group = find_group!(params[:id])
bad_request! unless can?(current_user, :admin_group_member, group)
bad_request! if group.subgroup?
user = ::User.find(params[:user_id])
not_found!('User') unless group.billed_user_ids[:user_ids].include?(user.id)
memberships = user.members.in_hierarchy(group).including_source
present paginate(memberships), with: ::EE::API::Entities::BillableMembership
end
desc 'Removes a billable member from a group or project.'
params do
requires :user_id, type: Integer, desc: 'The user ID of the member'
end
delete ":id/billable_members/:user_id" do
group = find_group!(params[:id])
result = ::BillableMembers::DestroyService.new(group, user_id: params[:user_id], current_user: current_user).execute
if result[:status] == :success
no_content!
else
bad_request!(result[:message])
end
end
end
end
end
end
end
| 35.016304 | 128 | 0.569455 |
ab6a8a0b8087e3199f33b2611c9429c2e32a54e1 | 3,220 | class Mypy < Formula
desc "Experimental optional static type checker for Python"
homepage "http://www.mypy-lang.org/"
url "https://github.com/python/mypy.git",
:tag => "v0.580",
:revision => "f38596dc3f078ac6705c08632a3043d8e3c9c1d5"
head "https://github.com/python/mypy.git"
bottle do
cellar :any_skip_relocation
sha256 "9b69cb0b45fcf7e21f31af35984f6b68cea1a0fb0a83093d3b8ad60edc6d1923" => :high_sierra
sha256 "9f9d3d159393bfc690cc524507ca6f058609569d2e25e70740fdce3aff8aff9c" => :sierra
sha256 "db2544a558fd5ac6afb0483439f3faeff8240810911d4e0df71b526299708fd6" => :el_capitan
sha256 "c9e22111fff64b50a3e2e6a58e562a3d7cfff604ded88c7b009932fe3bca301b" => :x86_64_linux
end
option "without-sphinx-doc", "Don't build documentation"
deprecated_option "without-docs" => "without-sphinx-doc"
depends_on "python"
depends_on "sphinx-doc" => [:build, :recommended]
resource "psutil" do
url "https://files.pythonhosted.org/packages/e2/e1/600326635f97fee89bf8426fef14c5c29f4849c79f68fd79f433d8c1bd96/psutil-5.4.3.tar.gz"
sha256 "e2467e9312c2fa191687b89ff4bc2ad8843be4af6fb4dc95a7cc5f7d7a327b18"
end
resource "sphinx_rtd_theme" do
url "https://files.pythonhosted.org/packages/8b/e5/b1933472424b30affb0a8cea8f0ef052a31ada96e5d1823911d7f4bfdf8e/sphinx_rtd_theme-0.2.4.tar.gz"
sha256 "2df74b8ff6fae6965c527e97cca6c6c944886aae474b490e17f92adfbe843417"
end
resource "typed-ast" do
url "https://files.pythonhosted.org/packages/52/cf/2ebc7d282f026e21eed4987e42e10964a077c13cfc168b42f3573a7f178c/typed-ast-1.1.0.tar.gz"
sha256 "57fe287f0cdd9ceaf69e7b71a2e94a24b5d268b35df251a88fef5cc241bf73aa"
end
def install
xy = Language::Python.major_minor_version "python3"
if build.with? "sphinx-doc"
# https://github.com/python/mypy/issues/2593
version_static = buildpath/"mypy/version_static.py"
version_static.write "__version__ = '#{version}'\n"
inreplace "docs/source/conf.py", "mypy.version", "mypy.version_static"
(buildpath/"docs/sphinx_rtd_theme").install resource("sphinx_rtd_theme")
# Inject sphinx_rtd_theme's path into sys.path
inreplace "docs/source/conf.py",
"sys.path.insert(0, os.path.abspath('../..'))",
"sys.path[:0] = [os.path.abspath('../..'), os.path.abspath('../sphinx_rtd_theme')]"
system "make", "-C", "docs", "html"
doc.install Dir["docs/build/html/*"]
rm version_static
end
ENV.prepend_create_path "PYTHONPATH", libexec/"vendor/lib/python#{xy}/site-packages"
resources.each do |r|
r.stage do
system "python3", *Language::Python.setup_install_args(libexec/"vendor")
end
end
ENV.prepend_create_path "PYTHONPATH", libexec/"lib/python#{xy}/site-packages"
system "python3", *Language::Python.setup_install_args(libexec)
bin.install Dir[libexec/"bin/*"]
bin.env_script_all_files(libexec/"bin", :PYTHONPATH => ENV["PYTHONPATH"])
end
test do
(testpath/"broken.py").write <<~EOS
def p() -> None:
print('hello')
a = p()
EOS
output = pipe_output("#{bin}/mypy broken.py 2>&1")
assert_match '"p" does not return a value', output
end
end
| 38.795181 | 146 | 0.721739 |
b9c7dc3d71c9760908ed357841d7e5a561c97475 | 1,620 | require 'test_helper'
class FollowingTest < ActionDispatch::IntegrationTest
def setup
@user = users(:michael)
@other = users(:archer)
log_in_as(@user)
end
test "following page" do
get following_user_path(@user)
assert_not @user.following.empty?
assert_match @user.following.count.to_s, response.body
@user.following.each do |user|
assert_select "a[href=?]", user_path(user)
end
end
test "followers page" do
get followers_user_path(@user)
assert_not @user.followers.empty?
assert_match @user.followers.count.to_s, response.body
@user.followers.each do |user|
assert_select "a[href=?]", user_path(user)
end
end
test "should follow a user the standard way" do
assert_difference '@user.following.count', 1 do
post relationships_path, params: { followed_id: @other.id }
end
end
test "should follow a user with Ajax" do
assert_difference '@user.following.count', 1 do
post relationships_path, xhr: true, params: { followed_id: @other.id }
end
end
test "should unfollow a user the standard way" do
@user.follow(@other)
relationship = @user.active_relationships.find_by(followed_id: @other.id)
assert_difference '@user.following.count', -1 do
delete relationship_path(relationship)
end
end
test "should unfollow a user with Ajax" do
@user.follow(@other)
relationship = @user.active_relationships.find_by(followed_id: @other.id)
assert_difference '@user.following.count', -1 do
delete relationship_path(relationship), xhr: true
end
end
end
| 27.931034 | 77 | 0.695062 |
26c692cdba612ad3c0e38c6c8cbd46333972ff5d | 1,629 | # frozen_string_literal: true
module Authlogic
# Represents the credentials *in* the cookie. The value of the cookie.
# This is primarily a data object. It doesn't interact with controllers.
# It doesn't know about eg. cookie expiration.
#
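  # A minimal usage sketch (added for illustration; the token, record id, and
  # expiry below are made-up values):
  #
  #   creds = CookieCredentials.new("abc123", 42, ::Time.now.utc + 3600)
  #   creds.to_s     # => "abc123::42::<iso8601 time>"
  #   CookieCredentials.parse(creds.to_s).remember_me? # => true
  #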
# @api private
class CookieCredentials
# @api private
class ParseError < RuntimeError
end
DELIMITER = "::"
attr_reader :persistence_token, :record_id, :remember_me_until
# @api private
# @param persistence_token [String]
# @param record_id [String, Numeric]
# @param remember_me_until [ActiveSupport::TimeWithZone]
def initialize(persistence_token, record_id, remember_me_until)
@persistence_token = persistence_token
@record_id = record_id
@remember_me_until = remember_me_until
end
class << self
# @api private
def parse(string)
parts = string.split(DELIMITER)
unless (1..3).cover?(parts.length)
raise ParseError, format("Expected 1..3 parts, got %d", parts.length)
end
new(parts[0], parts[1], parse_time(parts[2]))
end
private
# @api private
def parse_time(string)
return if string.nil?
::Time.parse(string)
rescue ::ArgumentError => e
        raise ParseError, "Found cookie, cannot parse remember_me_until: #{e}"
end
end
# @api private
def remember_me?
!@remember_me_until.nil?
end
# @api private
def to_s
[
@persistence_token,
@record_id.to_s,
@remember_me_until&.iso8601
].compact.join(DELIMITER)
end
end
end
| 25.453125 | 86 | 0.644567 |
d53ee2d069b8b7f96efd1b4f952712c863b16e20 | 6,253 | require 'spec_helper'
require 'fakefs/spec_helpers'
require 'zip'
module LicenseFinder
def self.broken_fakefs?
RUBY_PLATFORM =~ /java/ || RUBY_VERSION =~ /^(1\.9|2\.0)/
end
describe Nuget do
it_behaves_like 'a PackageManager'
describe '#assemblies' do
include FakeFS::SpecHelpers
before do
FileUtils.mkdir_p 'app/packages'
FileUtils.mkdir_p 'app/Assembly1/'
FileUtils.mkdir_p 'app/Assembly1.Tests/'
FileUtils.mkdir_p 'app/Assembly2/'
FileUtils.touch 'app/Assembly1/packages.config'
FileUtils.touch 'app/Assembly1.Tests/packages.config'
FileUtils.touch 'app/Assembly2/packages.config'
end
      it 'finds dependencies in all subdirectories containing a packages.config' do
nuget = Nuget.new project_path: Pathname.new('app')
expect(nuget.assemblies.map(&:name)).to match_array ['Assembly1', 'Assembly1.Tests', 'Assembly2']
end
context 'when packages.config is in .nuget directory' do
before do
FileUtils.mkdir_p 'app/.nuget'
FileUtils.touch 'app/.nuget/packages.config'
end
        it 'finds dependencies in all subdirectories containing a packages.config' do
nuget = Nuget.new project_path: Pathname.new('app')
expect(nuget.assemblies.map(&:name)).to include('.nuget')
end
end
end
describe '#detected_package_path' do
include FakeFS::SpecHelpers
context 'when .nupkg files exist, but are not in .nuget directory' do
before do
FileUtils.mkdir_p 'app/submodule/vendor'
FileUtils.touch 'app/submodule/vendor/package.nupkg'
FileUtils.mkdir_p 'app/vendor'
FileUtils.touch 'app/vendor/package.nupkg'
end
it 'returns vendored directory' do
nuget = Nuget.new project_path: Pathname.new('app')
expect(nuget.detected_package_path).to eq Pathname('/app/vendor')
end
end
context 'when .nuget exists' do
before do
FileUtils.mkdir_p 'app/.nuget'
end
        it 'returns the .nuget directory path' do
nuget = Nuget.new project_path: Pathname.new('app')
expect(nuget.detected_package_path).to eq Pathname('app/.nuget')
end
end
context 'when vendor/*.nupkg and .nuget/ are not present but packages.config file exists' do
before do
FileUtils.mkdir_p 'app'
FileUtils.touch 'app/packages.config'
end
it 'returns the packages.config file' do
nuget = Nuget.new project_path: Pathname.new('app')
expect(nuget.detected_package_path).to eq Pathname('app/packages.config')
end
end
end
describe '#current_packages' do
include FakeFS::SpecHelpers
before do
FileUtils.mkdir_p 'app/packages'
FileUtils.mkdir_p 'app/Assembly1/'
FileUtils.mkdir_p 'app/Assembly1.Tests/'
FileUtils.mkdir_p 'app/Assembly2/'
FileUtils.touch 'app/Assembly1/packages.config'
FileUtils.touch 'app/Assembly1.Tests/packages.config'
FileUtils.touch 'app/Assembly2/packages.config'
end
let(:assembly_1_packages) do
<<-ONE
<?xml version="1.0" encoding="utf-8"?>
<packages>
<package id="GoToDependency" version="4.84.4790.14417" targetFramework="net45" />
<package id="ObscureDependency" version="1.3.15" targetFramework="net45" />
<package id="OtherObscureDependency" version="2.4.2" targetFramework="net45" />
</packages>
ONE
end
let(:assembly_1_tests_packages) do
<<-ONE
<?xml version="1.0" encoding="utf-8"?>
<packages>
<package id="GoToDependency" version="4.84.4790.14417" targetFramework="net45" />
<package id="TestFramework" version="5.0.1" targetFramework="net45" />
</packages>
ONE
end
let(:assembly_2_packages) do
<<-ONE
<?xml version="1.0" encoding="utf-8"?>
<packages>
<package id="ObscureDependency" version="1.3.15" targetFramework="net45" />
<package id="CoolNewDependency" version="2.4.2" targetFramework="net45" />
</packages>
ONE
end
before do
File.write('app/Assembly1/packages.config', assembly_1_packages)
File.write('app/Assembly1.Tests/packages.config', assembly_1_tests_packages)
File.write('app/Assembly2/packages.config', assembly_2_packages)
end
it 'lists all the packages used in an assembly' do
nuget = Nuget.new project_path: Pathname.new('app')
deps = %w[GoToDependency
ObscureDependency
OtherObscureDependency
TestFramework
CoolNewDependency]
expect(nuget.current_packages.map(&:name).uniq).to match_array(deps)
end
# cannot run on JRuby due to https://github.com/fakefs/fakefs/issues/303
context 'when there is a .nupkg file', skip: LicenseFinder.broken_fakefs? do
before do
obscure_dependency_nuspec = <<-XML
<?xml version="1.0"?>
<package xmlns="http://schemas.microsoft.com/packaging/2010/07/nuspec.xsd">
<metadata>
<id>ObscureDependency</id>
<version>1.3.15</version>
<licenseUrl>http://www.opensource.org/licenses/mit-license.php</licenseUrl>
</metadata>
</package>
XML
File.write('app/packages/ObscureDependency.nuspec', obscure_dependency_nuspec)
Dir.chdir 'app/packages' do
Zip::File.open('ObscureDependency.1.3.15.nupkg', Zip::File::CREATE) do |zipfile|
zipfile.add('ObscureDependency.nuspec', 'ObscureDependency.nuspec')
end
end
end
it 'include the licenseUrl from the nuspec file' do
nuget = Nuget.new project_path: Pathname.new('app')
obscure_dep = nuget.current_packages.select { |dep| dep.name == 'ObscureDependency' }.first
expect(obscure_dep.license_names_from_spec).to eq(['http://www.opensource.org/licenses/mit-license.php'])
end
end
end
end
end
| 35.936782 | 115 | 0.626739 |
e2729ab0bc8f588dd8c8c42cc513c34635813aa8 | 1,533 | # http://www.codewars.com/kata/common-substrings
# --- iteration 1 ---
def substring_test(str1, str2)
return false unless str1.size > 1 && str2.size > 1
puts "str1: #{str1.inspect}"
puts "str2: #{str2.inspect}"
str1_down, str2_down = [str1, str2].map(&:downcase)
smaller, larger = [str1_down, str2_down].sort_by(&:size)
substrings = []
smaller.chars.each_with_index do |x, i|
if i+1 < smaller.size
substrings << smaller[i, 2]
else
end
end
substrings.each do |substr|
return true if /#{substr}/ === larger
end
false
end
# --- iteration 2 ---
def substring_test(str1, str2)
return false unless str1.size > 1 && str2.size > 1
str1_down, str2_down = [str1, str2].map(&:downcase)
smaller, larger = [str1_down, str2_down].sort_by(&:size)
substrings = []
smaller.chars.each_with_index do |x, i|
if i+1 < smaller.size
substrings << smaller[i, 2]
else
end
end
substrings.any? { |substr| /#{substr}/ === larger }
end
# --- iteration 3 ---
def substring_test(str1, str2)
return false unless str1.size > 1 && str2.size > 1
smaller, larger = [str1, str2].map(&:downcase).sort_by(&:size)
substrings = []
smaller.chars.each_with_index do |x, i|
if i+1 < smaller.size
substrings << smaller[i, 2]
else
end
end
substrings.any? { |substr| /#{substr}/ === larger }
end
# --- iteration 4 ---
def substring_test(str1, str2)
sm, lg = [str1, str2].map(&:downcase).sort_by(&:size)
sm.chars.each_cons(2).any? { |x| /#{x.join}/ === lg }
end
| 24.725806 | 64 | 0.630789 |
089899ea857d7cab7630f19ff63cd069738532ca | 1,714 | class DrugsController < ApplicationController
actions_without_auth :index, :existence, :name_suggestion, :local_name_suggestion
def index
drugs = Drug.page(params[:page])
.per(params[:count])
drugs = name_search(pubchem_id_search(drugs))
render json: drugs.map { |d| { name: d.name, pubchem_id: d.pubchem_id } }
end
def existence
proposed_pubchem_id = params[:pubchem_id]
(to_render, status) = if drug = Drug.find_by(pubchem_id: proposed_pubchem_id)
[{ name: drug.name, pubchem_id: drug.pubchem_id }, :ok]
else drug_name = Scrapers::PubChem.get_name_from_pubchem_id(proposed_pubchem_id)
if drug_name.present?
[{ name: drug_name, pubchem_id: proposed_pubchem_id }, :ok]
else
[{}, :not_found]
end
end
render json: to_render, status: status
end
def name_suggestion
if params[:q].blank?
render json: {errors: ['Must specify a query with parameter q']}, status: :bad_request
else
render json: DrugNameSuggestion.suggestions_for_name(params[:q]), status: :ok
end
end
def local_name_suggestion
if params[:q].blank?
render json: {errors: ['Must specify a query with parameter q']}, status: :bad_request
else
render json: DrugNameSuggestion.get_local_suggestions(params[:q]), status: :ok
end
end
private
def name_search(query)
if params[:name].present?
query.where('drugs.name ILIKE :name', name: "#{params[:name]}%")
else
query
end
end
def pubchem_id_search(query)
if params[:pubchem_id].present?
query.where('drugs.pubchem_id ILIKE :pubchem_id', pubchem_id: "#{params[:pubchem_id]}%")
else
query
end
end
end
| 28.098361 | 94 | 0.676196 |
7905aee51b82c72da77a31ccb510675fc9cb93ff | 1,599 | #!/usr/bin/env ruby
# Copyright (c) 2008-2012, Edd Barrett <[email protected]>
#
# Permission to use, copy, modify, and/or distribute this software for any
# purpose with or without fee is hereby granted, provided that the above
# copyright notice and this permission notice appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
# OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
#
# RBlatter
# $Id: tlpdbindex.rb,v 1.3 2012/12/17 20:28:04 edd Exp $
#
# Index a tlpdb file for faster lookups
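#
# Example usage (an illustrative sketch; the file and package names below are
# assumptions, not from the original source):
#
#   File.open("texlive.tlpdb") do |f|
#     idx = TlpdbIndex.new(f, :quiet => true)
#     idx.index["scheme-basic"]  # => line number of that package's "name" line
#   end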
class TlpdbIndex
attr_reader :index
  # Make a new index based upon the tlpdb file +file+.
  # * +file+ is the tlpdb
def initialize(file, options = nil)
@options = options || {}
@index = {}
@dbFile = file
makeIndex
end
# Print debug info about this index database
def dumpIndex()
@index.each do | key, val |
puts "#{key} : #{val}"
end
end
private
# Parse and index the tlpdb file
def makeIndex()
puts "Indexing TlpDB..." unless @options[:quiet]
lineno = 1
for line in @dbFile do
if line =~ /^name (.*)/ then
@index[$1] = lineno
end
lineno = lineno.next
end
puts "Done. #{@index.size} packages" unless @options[:quiet]
end
end
| 28.052632 | 74 | 0.707942 |
e8efe70e2c24d89295aae305089e8967d6012bca | 2,910 |
require "capybara/rspec"
require "./app"
require "pry"
require('spec_helper')
Capybara.app = Sinatra::Application
set(:show_exceptions, false)
# Your project should be set up so that a volunteer can only be created if a project already exists. (This makes it easier to assign the one to many relationship in Sinatra.) Focus on getting one integration spec passing at a time.
# The user should be able to visit the home page and fill out a form to add a new project. When that project is created, the application should direct them back to the homepage.
describe 'the project creation path', {:type => :feature} do
it 'takes the user to the homepage where they can create a project' do
visit '/'
fill_in('project_name', :with => 'Teaching Kids to Code')
click_button('Create Project')
expect(page).to have_content('Teaching Kids to Code')
end
end
# A user should be able to click on a project to see its detail. The detail page includes a form where the project can be updated. When the form is submitted, the user can be directed to either the home page or that project's detail page. (The test will work for either.)
describe 'the project update path', {:type => :feature} do
it 'allows a user to change the name of the project' do
test_project = Project.new({:name => 'Teaching Kids to Code', :id => nil})
test_project.save
visit '/'
click_link('Teaching Kids to Code')
click_link('Edit!')
fill_in('name', :with => 'Teaching Ruby to Kids')
click_button('Update!')
expect(page).to have_content('Teaching Ruby to Kids')
end
end
# A user should be able to navigate to a project's detail page and delete the project. The user will then be directed to the index page. The project should no longer be on the list of projects.
describe 'the project delete path', {:type => :feature} do
it 'allows a user to delete a project' do
test_project = Project.new({:name => 'Teaching Kids to Code', :id => nil})
test_project.save
id = test_project.id
visit "/projects/#{id}/edit"
click_button('Delete!')
visit '/'
expect(page).not_to have_content("Teaching Kids to Code")
end
end
# The user should be able to click on a project detail page and see a list of all volunteers working on that project. The user should be able to click on a volunteer to see the volunteer's detail page.
describe 'the volunteer detail page path', {:type => :feature} do
it 'shows a volunteer detail page' do
test_project = Project.new({:name => 'Teaching Kids to Code', :id => nil})
test_project.save
project_id = test_project.id.to_i
test_volunteer = Volunteer.new({:name => 'Jasmine', :project_id => project_id, :id => nil})
test_volunteer.save
visit "/projects/#{project_id}"
click_link('Jasmine')
fill_in('name', :with => 'Jane')
click_button('Rename!')
expect(page).to have_content('Jane')
end
end
| 42.794118 | 271 | 0.710997 |
e21e3b34063da412b86d307c78cff8e60a5ac87e | 340 | cask 'font-poller-one' do
version :latest
sha256 :no_check
# github.com/google/fonts was verified as official when first introduced to the cask
url 'https://github.com/google/fonts/raw/master/ofl/pollerone/PollerOne.ttf'
name 'Poller One'
homepage 'https://www.google.com/fonts/specimen/Poller+One'
font 'PollerOne.ttf'
end
| 28.333333 | 86 | 0.75 |
f71417276018236f5dc7fbbc9ecf50bbd0409e37 | 1,121 | $:.push File.expand_path('../lib', __FILE__)
# Maintain your gem's version:
require 'symmetric_encryption/version'
# Describe your gem and declare its dependencies:
Gem::Specification.new do |s|
s.name = 'symmetric-encryption'
s.version = SymmetricEncryption::VERSION
s.platform = Gem::Platform::RUBY
s.authors = ['Reid Morrison']
s.email = ['[email protected]']
s.homepage = 'http://rocketjob.github.io/symmetric-encryption/'
s.summary = 'Encryption for Ruby, and Ruby on Rails'
s.description = 'Transparently encrypt ActiveRecord, Mongoid, and MongoMapper attributes. Encrypt passwords in configuration files. Encrypt entire files at rest.'
s.files = Dir['{lib,examples}/**/*', 'LICENSE.txt', 'Rakefile', 'README.md']
s.test_files = Dir['test/**/*']
s.license = 'Apache-2.0'
s.required_ruby_version = '>= 2.1'
s.bindir = 'bin'
s.executables = ['symmetric-encryption']
s.add_dependency 'coercible', '~> 1.0'
end
| 46.708333 | 174 | 0.595897 |
ac7abcaf91da28edec423918d7ec7432a50aa2df | 3,366 | require "spec_helper"
describe Mongoid::Relations::Bindings::Embedded::In do
let(:person) do
Person.new
end
let(:name) do
Name.new
end
let(:address) do
Address.new
end
let(:name_metadata) do
Name.relations["namable"]
end
let(:address_metadata) do
Address.relations["addressable"]
end
let(:person_metadata) do
Person.relations["addresses"]
end
describe "#bind" do
context "when the child of an embeds one" do
let(:binding) do
described_class.new(name, person, name_metadata)
end
context "when the document is bindable" do
before do
binding.bind
end
it "parentizes the documents" do
name._parent.should == person
end
it "sets the inverse relation" do
person.name.should == name
end
end
context "when the document is not bindable" do
before do
person.name = name
end
it "does nothing" do
name.expects(:namable=).never
binding.bind
end
end
end
context "when the child of an embeds many" do
let(:binding) do
described_class.new(address, person, address_metadata)
end
context "when the document is bindable" do
context "when the base has no metadata" do
before do
binding.bind
end
it "parentizes the documents" do
address._parent.should == person
end
it "sets the inverse relation" do
person.addresses.should include(address)
end
end
context "when the base has metadata" do
before do
address.metadata = person_metadata
end
it "does not overwrite the existing metadata" do
address.expects(:metadata=).never
binding.bind
end
end
end
context "when the document is not bindable" do
before do
person.addresses = [ address ]
end
it "does nothing" do
address.expects(:addressable=).never
binding.bind
end
end
end
end
describe "#unbind" do
context "when the child of an embeds one" do
let(:binding) do
described_class.new(name, person, name_metadata)
end
context "when the document is unbindable" do
before do
binding.bind
binding.unbind
end
it "removes the inverse relation" do
person.name.should be_nil
end
end
context "when the document is not unbindable" do
it "does nothing" do
name.expects(:namable=).never
binding.unbind
end
end
end
context "when the child of an embeds many" do
let(:binding) do
described_class.new(address, person, address_metadata)
end
context "when the document is unbindable" do
before do
binding.bind
binding.unbind
end
it "removes the inverse relation" do
person.addresses.should be_empty
end
end
context "when the document is not unbindable" do
it "does nothing" do
address.expects(:addressable=).never
binding.unbind
end
end
end
end
end
| 19.569767 | 62 | 0.574272 |
3303b148a670900f8f73eaa78dfc84ffae5848be | 91 | # frozen_string_literal: true
require 'rails_helper'
RSpec.describe NoteDecorator do
end
| 13 | 31 | 0.824176 |
62e1d67c5e26caf95cf501dd7870fc81ae07b0fb | 808 | # encoding: utf-8
$:.unshift File.expand_path('../lib', __FILE__)
require 'glynn/version'
Gem::Specification.new do |s|
s.name = "glynn"
s.version = Glynn::VERSION
s.authors = ["Damien MATHIEU"]
s.email = "[email protected]"
s.description = "Deploy a jekyll weblog through ftp"
s.summary = "Deploy a jekyll weblog through ftp"
s.homepage = "https://github.com/dmathieu/glynn"
s.license = "MIT"
s.files = `git ls-files app lib`.split("\n")
s.platform = Gem::Platform::RUBY
s.require_path = 'lib'
s.executables = ['glynn']
s.add_development_dependency "bundler"
s.add_development_dependency "minitest"
s.add_dependency('jekyll', [">= 0"])
s.add_dependency('netrc', [">= 0"])
s.add_dependency('highline', [">= 1.5"])
end
| 27.862069 | 55 | 0.626238 |
18bbba8e5ee70b0b4bca7729743c7578b3aeec44 | 7,973 | #
# Cookbook Name:: redisio
# Provider::install
#
# Copyright 2012, Brian Bianco <[email protected]>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
action :run do
@tarball = "#{new_resource.base_name}#{new_resource.version}.#{new_resource.artifact_type}"
unless ( current_resource.version == new_resource.version || (redis_exists? && new_resource.safe_install) )
Chef::Log.info("Installing Redis #{new_resource.version} from source")
download
unpack
build
install
end
configure
end
def download
Chef::Log.info("Downloading redis tarball from #{new_resource.download_url}")
remote_file "#{new_resource.download_dir}/#{@tarball}" do
source new_resource.download_url
end
end
def unpack
case new_resource.artifact_type
when "tar.gz",".tgz"
execute "cd #{new_resource.download_dir} && tar zxf #{@tarball}"
else
raise Chef::Exceptions::UnsupportedAction, "Current package type #{new_resource.artifact_type} is unsupported"
end
end
def build
  execute "cd #{new_resource.download_dir}/#{new_resource.base_name}#{new_resource.version} && make clean && make"
end
def install
execute "cd #{new_resource.download_dir}/#{new_resource.base_name}#{new_resource.version} && make install"
new_resource.updated_by_last_action(true)
end
def configure
base_piddir = new_resource.base_piddir
version_hash = RedisioHelper.version_to_hash(new_resource.version)
#Setup a configuration file and init script for each configuration provided
new_resource.servers.each do |current_instance|
    #Retrieve the default settings hash and the current server setup's settings hash.
current_instance_hash = current_instance.to_hash
current_defaults_hash = new_resource.default_settings.to_hash
#Merge the configuration defaults with the provided array of configurations provided
current = current_defaults_hash.merge(current_instance_hash)
recipe_eval do
piddir = "#{base_piddir}/#{current['port']}"
aof_file = "#{current['datadir']}/appendonly-#{current['port']}.aof"
rdb_file = "#{current['datadir']}/dump-#{current['port']}.rdb"
#Create the owner of the redis data directory
user current['user'] do
comment 'Redis service account'
supports :manage_home => true
home current['homedir']
shell current['shell']
end
#Create the redis configuration directory
directory current['configdir'] do
owner 'root'
group 'root'
mode '0755'
recursive true
action :create
end
#Create the instance data directory
directory current['datadir'] do
owner current['user']
group current['group']
mode '0775'
recursive true
action :create
end
#Create the pid file directory
directory piddir do
owner current['user']
group current['group']
mode '0755'
recursive true
action :create
end
#Create the log directory if syslog is not being used
directory ::File.dirname("#{current['logfile']}") do
owner current['user']
group current['group']
mode '0755'
recursive true
action :create
only_if { current['syslogenabled'] != 'yes' && current['logfile'] && current['logfile'] != 'stdout' }
end
      #Create the log file if syslog is not being used
file current['logfile'] do
owner current['user']
group current['group']
mode '0644'
backup false
action :touch
only_if { current['logfile'] && current['logfile'] != 'stdout' }
end
#Set proper permissions on the AOF or RDB files
file aof_file do
owner current['user']
group current['group']
mode '0644'
only_if { current['backuptype'] == 'aof' || current['backuptype'] == 'both' }
only_if { ::File.exists?(aof_file) }
end
file rdb_file do
owner current['user']
group current['group']
mode '0644'
only_if { current['backuptype'] == 'rdb' || current['backuptype'] == 'both' }
only_if { ::File.exists?(rdb_file) }
end
#Lay down the configuration files for the current instance
template "#{current['configdir']}/#{current['port']}.conf" do
source 'redis.conf.erb'
cookbook 'redisio'
owner current['user']
group current['group']
mode '0644'
variables({
:version => version_hash,
:piddir => piddir,
:port => current['port'],
:address => current['address'],
:databases => current['databases'],
:backuptype => current['backuptype'],
:datadir => current['datadir'],
:timeout => current['timeout'],
:loglevel => current['loglevel'],
:logfile => current['logfile'],
:syslogenabled => current['syslogenabled'],
:syslogfacility => current['syslogfacility'],
:save => current['save'],
:slaveof => current['slaveof'],
:masterauth => current['masterauth'],
:slaveservestaledata => current['slaveservestaledata'],
:replpingslaveperiod => current['replpingslaveperiod'],
:repltimeout => current['repltimeout'],
:requirepass => current['requirepass'],
:maxclients => current['maxclients'],
:maxmemory => current['maxmemory'],
:maxmemorypolicy => current['maxmemorypolicy'],
:maxmemorysamples => current['maxmemorysamples'],
:appendfsync => current['appendfsync'],
:noappendfsynconrewrite => current['noappendfsynconrewrite'],
:aofrewritepercentage => current['aofrewritepercentage'] ,
:aofrewriteminsize => current['aofrewriteminsize'],
:stopwritesonbgsaveerror => current['stopwritesonbgsaveerror'],
:includes => current['includes']
})
end
#Setup init.d file
template "/etc/init.d/redis#{current['port']}" do
source 'redis.init.erb'
cookbook 'redisio'
owner 'root'
group 'root'
mode '0755'
variables({
:port => current['port'],
:address => current['address'],
:user => current['user'],
:configdir => current['configdir'],
:piddir => piddir,
:requirepass => current['requirepass'],
:platform => node['platform']
})
end
end
end # servers each loop
end
def redis_exists?
exists = Chef::ShellOut.new("which redis-server")
exists.run_command
  exists.exitstatus == 0
end
def version
if redis_exists?
redis_version = Chef::ShellOut.new("redis-server -v")
redis_version.run_command
version = redis_version.stdout[/version (\d*.\d*.\d*)/,1] || redis_version.stdout[/v=(\d*.\d*.\d*)/,1]
Chef::Log.info("The Redis server version is: #{version}")
return version.gsub("\n",'')
end
nil
end
def load_current_resource
@current_resource = Chef::Resource::RedisioInstall.new(new_resource.name)
@current_resource.version(version)
@current_resource
end
| 35.753363 | 116 | 0.6147 |
790c2a1969db73381690c9b7c9662724e12789b0 | 931 | require "spec_helper"
require "hamster/list"
describe Hamster::List do
[:union, :|].each do |method|
describe "##{method}" do
it "is lazy" do
-> { Hamster.stream { fail }.union(Hamster.stream { fail }) }.should_not raise_error
end
[
[[], [], []],
[["A"], [], ["A"]],
[%w[A B C], [], %w[A B C]],
[%w[A A], ["A"], ["A"]],
].each do |a, b, expected|
describe "returns #{expected.inspect}" do
before do
@a = Hamster.list(*a)
@b = Hamster.list(*b)
end
it "for #{a.inspect} and #{b.inspect}" do
@result = @a.send(method, @b)
end
it "for #{b.inspect} and #{a.inspect}" do
@result = @b.send(method, @a)
end
after do
@result.should == Hamster.list(*expected)
end
end
end
end
end
end
| 18.62 | 92 | 0.443609 |
3854dd27e5fe019e43fe5da97e558eea8495d5e2 | 1,415 | require 'spec_helper'
describe Myaccount::OverviewsController do
render_views
before(:each) do
activate_authlogic
@user = create(:user)
login_as(@user)
end
it "show action should render show template" do
get :show
response.should render_template(:show)
end
it "show action should render show template" do
@address = create(:address, :addressable => @user)
@user.stubs(:shipping_address).returns(@address)
get :show
response.should render_template(:show)
end
it "edit action should render edit template" do
get :edit
response.should render_template(:edit)
end
it "update action should render edit template when model is invalid" do
User.any_instance.stubs(:valid?).returns(false)
put :update, :user => @user.attributes.reject {|k,v| ![ 'first_name', 'last_name', 'password','birth_date'].include?(k)}
response.should render_template(:edit)
end
it "update action should redirect when model is valid" do
User.any_instance.stubs(:valid?).returns(true)
put :update, :user => @user.attributes.reject {|k,v| ![ 'first_name', 'last_name', 'password','birth_date'].include?(k)}
response.should redirect_to(myaccount_overview_url())
end
end
describe Myaccount::OverviewsController do
render_views
it "not logged in should redirect to login page" do
get :show
response.should redirect_to(login_url)
end
end
| 27.745098 | 124 | 0.713074 |
6a07aa18693b023692982b311f7cb8aae6c0f4d9 | 7,228 | require 'spec_helper'
require 'pdk/module/update_manager'
describe PDK::Module::UpdateManager do
subject(:update_manager) { described_class.new }
let(:dummy_file) { File.join(Dir.pwd, 'test_file') }
describe '#initialize' do
it 'has no pending changes by default' do
expect(update_manager.changes?).to be_falsey
end
end
describe '#add_file' do
let(:content) { "some content\n" }
before(:each) do
update_manager.add_file(dummy_file, content)
end
it 'creates a pending change' do
expect(update_manager.changes?).to be_truthy
end
it 'creates a file added change' do
expect(update_manager.changes).to include(added: [{ path: dummy_file, content: content }])
end
it 'knows that the file will be changed' do
expect(update_manager.changed?(dummy_file)).to be_truthy
end
context 'when syncing the changes' do
let(:dummy_file_io) { StringIO.new }
before(:each) do
allow(File).to receive(:open).with(any_args).and_call_original
allow(File).to receive(:open).with(dummy_file, 'w').and_yield(dummy_file_io)
update_manager.sync_changes!
dummy_file_io.rewind
end
it 'writes the file to disk' do
expect(dummy_file_io.read).to eq(content)
end
context 'but if the file can not be written to' do
before(:each) do
allow(File).to receive(:open).with(dummy_file, 'w').and_raise(Errno::EACCES)
end
it 'exits with an error' do
expect {
update_manager.sync_changes!
}.to raise_error(PDK::CLI::ExitWithError, %r{You do not have permission to write to '#{Regexp.escape(dummy_file)}'})
end
end
end
end
describe '#remove_file' do
before(:each) do
update_manager.remove_file(dummy_file)
end
it 'creates a pending change' do
expect(update_manager.changes?).to be_truthy
end
it 'creates a file removed change' do
expect(update_manager.changes).to include(removed: [dummy_file])
end
it 'knows that the file will be changed' do
expect(update_manager.changed?(dummy_file)).to be_truthy
end
context 'when syncing the changes' do
context 'and the file exists' do
before(:each) do
allow(File).to receive(:file?).with(dummy_file).and_return(true)
end
it 'removes the file' do
expect(FileUtils).to receive(:rm).with(dummy_file)
update_manager.sync_changes!
end
context 'but it fails to remove the file' do
before(:each) do
allow(FileUtils).to receive(:rm).with(dummy_file).and_raise(StandardError, 'an unknown error')
end
it 'exits with an error' do
expect {
update_manager.sync_changes!
}.to raise_error(PDK::CLI::ExitWithError, %r{Unable to remove '#{Regexp.escape(dummy_file)}': an unknown error})
end
end
end
context 'and the file does not exist' do
before(:each) do
allow(File).to receive(:file?).with(dummy_file).and_return(false)
end
it 'does not attempt to remove the file' do
expect(FileUtils).not_to receive(:rm).with(dummy_file)
update_manager.sync_changes!
end
end
end
end
describe '#modify_file' do
let(:original_content) do
<<-EOS.gsub(%r{^ {8}}, '')
line 1
line 2
line 3
EOS
end
let(:new_content) do
<<-EOS.gsub(%r{^ {8}}, '')
line 4
line 2
line 3
line 1
EOS
end
before(:each) do
allow(File).to receive(:readable?).with(dummy_file).and_return(true)
allow(File).to receive(:read).with(dummy_file).and_return(original_content)
allow(File).to receive(:stat).with(dummy_file).and_return(instance_double(File::Stat, mtime: Time.now - 60))
end
context 'when the file can not be opened for reading' do
before(:each) do
allow(File).to receive(:readable?).with(dummy_file).and_return(false)
update_manager.modify_file(dummy_file, new_content)
end
it 'exits with an error' do
expect {
update_manager.changes
}.to raise_error(PDK::CLI::ExitWithError, %r{Unable to open '#{Regexp.escape(dummy_file)}' for reading})
end
end
context 'when the new file content differs from the original content' do
let(:expected_diff) do
<<-EOS.chomp.gsub(%r{^ {10}}, '')
@@ -1,4 +1,5 @@
-line 1
+line 4
line 2
line 3
+line 1
EOS
end
before(:each) do
update_manager.modify_file(dummy_file, new_content)
end
it 'creates a pending change' do
expect(update_manager.changes?).to be_truthy
end
it 'creates a file modified change' do
expect(update_manager.changes).to include(modified: { dummy_file => anything })
end
it 'creates a diff of the changes' do
diff_lines = update_manager.changes[:modified][dummy_file].split("\n")
expect(diff_lines[0]).to match(%r{\A--- #{Regexp.escape(dummy_file)}.+})
expect(diff_lines[1]).to match(%r{\A\+\+\+ #{Regexp.escape(dummy_file)}\.pdknew.+})
expect(diff_lines[2..-1].join("\n")).to eq(expected_diff)
end
it 'knows that the file will be changed' do
expect(update_manager.changed?(dummy_file)).to be_truthy
end
context 'when syncing the changes' do
let(:dummy_file_io) { StringIO.new }
before(:each) do
allow(File).to receive(:open).with(any_args).and_call_original
allow(File).to receive(:open).with(dummy_file, 'w').and_yield(dummy_file_io)
update_manager.sync_changes!
dummy_file_io.rewind
end
it 'writes the modified file to disk' do
expect(dummy_file_io.read).to eq(new_content)
end
context 'but if the file can not be written to' do
before(:each) do
allow(File).to receive(:open).with(dummy_file, 'w').and_raise(Errno::EACCES)
end
it 'exits with an error' do
expect {
update_manager.sync_changes!
}.to raise_error(PDK::CLI::ExitWithError, %r{You do not have permission to write to '#{Regexp.escape(dummy_file)}'})
end
end
end
end
context 'when the new file content matches the original content' do
before(:each) do
update_manager.modify_file(dummy_file, original_content)
end
it 'does not create a pending change' do
expect(update_manager.changes?).to be_falsey
end
it 'does not create a file modified change' do
expect(update_manager.changes).to include(modified: {})
end
it 'knows that the file will not be changed' do
expect(update_manager.changed?(dummy_file)).to be_falsey
end
context 'when syncing the changes' do
it 'does not modify the file' do
expect(File).not_to receive(:open).with(dummy_file, 'w')
update_manager.sync_changes!
end
end
end
end
end
| 29.622951 | 128 | 0.61995 |
21d523372206149c0fc392458d7eeb1da5e71711 | 1,304 | require_relative 'boot'
require "rails"
# Pick the frameworks you want:
require "active_model/railtie"
require "active_job/railtie"
# require "active_record/railtie"
# require "active_storage/engine"
require "action_controller/railtie"
# require "action_mailer/railtie"
require "action_view/railtie"
# require "action_cable/engine"
# require "sprockets/railtie"
# require "rails/test_unit/railtie"
# Require the gems listed in Gemfile, including any gems
# you've limited to :test, :development, or :production.
Bundler.require(*Rails.groups)
module Jsearch
class Application < Rails::Application
# Initialize configuration defaults for originally generated Rails version.
config.load_defaults 5.2
# Settings in config/environments/* take precedence over those specified here.
# Application configuration can go into files in config/initializers
# -- all .rb files in that directory are automatically loaded after loading
# the framework and any gems in your application.
config.autoload_paths << Rails.root.join('lib')
# Don't generate system test files.
config.generators.system_tests = nil
config.generators do |g|
g.stylesheets false
g.javascripts false
g.helper false
g.channel assets: false
end
end
end
| 31.047619 | 82 | 0.742331 |
031027ea79d261c56c90a79798aca49828bf48e7 | 472 | class StiForTransactions < ActiveRecord::Migration
def self.up
rename_table "creditcard_txns", "transactions"
add_column "transactions", "type", :string
remove_column "transactions", "creditcard_id"
Transaction.update_all(:type => 'CreditcardTxn') if defined? Transaction
end
def self.down
rename_table "transactions", "creditcard_txns"
remove_column "transactions", "type"
add_column "transactions", "creditcard_id", :integer
end
end
| 31.466667 | 76 | 0.741525 |
2611e0d2af821bb2d047db241c596b74ae604dff | 1,440 | module Chatmeter
class API
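    # Usage sketch (illustrative only) of how the endpoint wrappers below are
    # typically driven. Construction and authentication are assumed to be
    # handled elsewhere in the gem, and the campaign id shown is hypothetical.
    #
    #   api = Chatmeter::API.new
    #   api.list_all_campaigns
    #   api.get_campaign_by_id('abc123')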
# GET /reviewBuilder/campaign/get
def list_all_campaigns
request(
expects: 200,
method: :get,
path: "/reviewBuilder/campaign/get"
)
end
# POST /reviewBuilder/campaign/create
def create_new_campaign(params)
request(
expects: 201,
method: :post,
path: "/reviewBuilder/campaign/create",
body: params.to_json
)
end
# GET /reviewBuilder/campaign/get/{campaignId}
def get_campaign_by_id(campaign_id)
request(
expects: 200,
method: :get,
path: "/reviewBuilder/campaign/get/#{campaign_id}"
)
end
# PUT /reviewBuilder/campaign/{campaignId}
def update_campaign(campaign_id, params)
request(
expects: 200,
method: :put,
path: "/reviewBuilder/campaign/#{campaign_id}",
body: params.to_json
)
end
# GET /reviewBuilder/campaign
def search_campaign(params={})
request(
expects: 200,
method: :get,
path: "/reviewBuilder/campaign?#{params.to_query}"
)
end
# DELETE /reviewBuilder/campaign/delete/{campaignId}
def delete_campaign(campaign_id)
request(
expects: 200,
method: :delete,
path: "/reviewBuilder/campaign/delete/#{campaign_id}?deleteReviews=true"
)
end
end
end
| 23.606557 | 84 | 0.581944 |
4a5b5a7813027eaa35b99ad1e08a74313f1fcded | 1,564 | require 'scrape_driver'
class ReadingScraper
attr_reader :twine
TEMP_SELECTOR = ".temperature-value"
def initialize(twine)
@twine = twine
end
def get_reading
temp = get_temp_from_supermechanical
make_and_return_reading_from_temp(temp)
end
private
def get_temp_from_supermechanical
supermech_noko = get_nokogirified_supermechanical_site
text_temp = get_temp_from_nokogiri_object(supermech_noko)
numberize(text_temp)
end
def numberize(text_temp)
text_temp == "" ? nil : text_temp.to_i
end
def get_html_from_supermechanical_site
session = ScrapeDriver.new
log_in_to_supermechanical_site(session)
html = session.html
session.driver.quit
return html
end
def log_in_to_supermechanical_site(session)
session.visit 'https://twine.cc/login?next=%2F'
session.fill_in 'email', :with => twine.email
session.fill_in 'password', :with => "33west26"
sleep 1 + rand(1..10)/50
session.click_button 'signin'
sleep 5
end
def get_nokogirified_supermechanical_site
html = get_html_from_supermechanical_site
Nokogiri::HTML(html)
end
def get_temp_from_nokogiri_object(noko)
noko.css(TEMP_SELECTOR).text
end
def current_outdoor_temp
CanonicalTemperature.get_hourly_reading(twine.zip_code)
end
def make_and_return_reading_from_temp(temp)
reading = Reading.new_from_twine(temp, current_outdoor_temp, twine, twine.user)
reading.save
return reading
end
end
| 24.061538 | 85 | 0.715473 |
036f629530079cb6ff5f61ca2f00aa389c873d0d | 1,402 | # Copyright 2011-2013 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"). You
# may not use this file except in compliance with the License. A copy of
# the License is located at
#
# http://aws.amazon.com/apache2.0/
#
# or in the "license" file accompanying this file. This file is
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
# ANY KIND, either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
When /^I create a vpn gateway$/ do
@vpn_gateway = @ec2.vpn_gateways.create
@created_vpn_gateways << @vpn_gateway
end
Then /^the vpn gateway should exist$/ do
@vpn_gateway.exists?.should == true
end
Then /^the vpn gateway state should eventually be "([^"]*)"$/ do |state|
eventually do
@vpn_gateway.state.to_s.should == state
end
end
When /^I delete the vpn gateway$/ do
@vpn_gateway.delete
end
When /^I attach the vpn gateway to the vpc$/ do
@vpn_gateway.attach(@vpc)
end
Then /^the vpn gateway's vpc should match$/ do
@vpn_gateway.vpc.should == @vpc
end
When /^I detach the vpn gateway and vpc$/ do
@vpn_gateway.detach(@vpc)
end
Then /^the vpn gateway attachment state should eventually be "([^"]*)"$/ do |state|
eventually do
@vpn_gateway.attachments.first.state.to_s.should == state
end
end
| 28.04 | 83 | 0.723966 |
6a1c08af59f367373216fc90f0f7512523460f32 | 1,035 | module Arango
class Server
module Batch
# === BATCH ===
def batch(requests: [])
Arango::RequestBatch.new(server: self, requests: requests)
end
def create_dump_batch(ttl:, dbserver: nil)
query = { DBserver: dbserver }
body = { ttl: ttl }
result = request("POST", "_api/replication/batch",
body: body, query: query)
return result if return_directly?(result)
return result[:id]
end
def destroy_dump_batch(id:, dbserver: nil)
query = {DBserver: dbserver}
result = request("DELETE", "_api/replication/batch/#{id}", query: query)
return_delete(result)
end
def prolong_dump_batch(id:, ttl:, dbserver: nil)
query = { DBserver: dbserver }
body = { ttl: ttl }
result = request("PUT", "_api/replication/batch/#{id}",
body: body, query: query)
return result if return_directly?(result)
return true
end
end
end
end
| 28.75 | 80 | 0.566184 |
e28e5ddec323b119c39af6b138a090d1514c8b80 | 470 | class MiqWorker
module ReplicaPerWorker
extend ActiveSupport::Concern
def create_container_objects
ContainerOrchestrator.new.create_deployment(worker_deployment_name) do |definition|
configure_worker_deployment(definition)
end
scale_deployment
end
def delete_container_objects
ContainerOrchestrator.new.delete_deployment(worker_deployment_name)
end
def stop_container
scale_deployment
end
end
end
| 22.380952 | 89 | 0.761702 |
870ceee61d1be3c6724ef3a642eed99e7a162a1b | 7,151 | =begin
#Fatture in Cloud API v2 - API Reference
#Connect your software with Fatture in Cloud, the invoicing platform chosen by more than 400.000 businesses in Italy. The Fatture in Cloud API is based on REST, and makes possible to interact with the user related data prior authorization via OAuth2 protocol.
The version of the OpenAPI document: 2.0.16
Contact: [email protected]
Generated by: https://openapi-generator.tech
OpenAPI Generator version: 5.4.0
=end
require 'date'
require 'time'
module FattureInCloud_Ruby_Sdk
#
class CreateReceivedDocumentRequest
# Pending received document id of the document from which the new document is created.
attr_accessor :pending_id
attr_accessor :data
# Attribute mapping from ruby-style variable name to JSON key.
def self.attribute_map
{
:'pending_id' => :'pending_id',
:'data' => :'data'
}
end
# Returns all the JSON keys this model knows about
def self.acceptable_attributes
attribute_map.values
end
# Attribute type mapping.
def self.openapi_types
{
:'pending_id' => :'Integer',
:'data' => :'ReceivedDocument'
}
end
# List of attributes with nullable: true
def self.openapi_nullable
Set.new([
:'pending_id',
])
end
# Initializes the object
# @param [Hash] attributes Model attributes in the form of hash
def initialize(attributes = {})
if (!attributes.is_a?(Hash))
fail ArgumentError, "The input argument (attributes) must be a hash in `FattureInCloud_Ruby_Sdk::CreateReceivedDocumentRequest` initialize method"
end
# check to see if the attribute exists and convert string to symbol for hash key
attributes = attributes.each_with_object({}) { |(k, v), h|
if (!self.class.attribute_map.key?(k.to_sym))
fail ArgumentError, "`#{k}` is not a valid attribute in `FattureInCloud_Ruby_Sdk::CreateReceivedDocumentRequest`. Please check the name to make sure it's valid. List of attributes: " + self.class.attribute_map.keys.inspect
end
h[k.to_sym] = v
}
if attributes.key?(:'pending_id')
self.pending_id = attributes[:'pending_id']
end
if attributes.key?(:'data')
self.data = attributes[:'data']
end
end
# Show invalid properties with the reasons. Usually used together with valid?
# @return Array for valid properties with the reasons
def list_invalid_properties
invalid_properties = Array.new
invalid_properties
end
# Check to see if the all the properties in the model are valid
# @return true if the model is valid
def valid?
true
end
# Checks equality by comparing each attribute.
# @param [Object] Object to be compared
def ==(o)
return true if self.equal?(o)
self.class == o.class &&
pending_id == o.pending_id &&
data == o.data
end
# @see the `==` method
# @param [Object] Object to be compared
def eql?(o)
self == o
end
# Calculates hash code according to all attributes.
# @return [Integer] Hash code
def hash
[pending_id, data].hash
end
# Builds the object from hash
# @param [Hash] attributes Model attributes in the form of hash
# @return [Object] Returns the model itself
def self.build_from_hash(attributes)
new.build_from_hash(attributes)
end
# Builds the object from hash
# @param [Hash] attributes Model attributes in the form of hash
# @return [Object] Returns the model itself
def build_from_hash(attributes)
return nil unless attributes.is_a?(Hash)
self.class.openapi_types.each_pair do |key, type|
if attributes[self.class.attribute_map[key]].nil? && self.class.openapi_nullable.include?(key)
self.send("#{key}=", nil)
elsif type =~ /\AArray<(.*)>/i
# check to ensure the input is an array given that the attribute
# is documented as an array but the input is not
if attributes[self.class.attribute_map[key]].is_a?(Array)
self.send("#{key}=", attributes[self.class.attribute_map[key]].map { |v| _deserialize($1, v) })
end
elsif !attributes[self.class.attribute_map[key]].nil?
self.send("#{key}=", _deserialize(type, attributes[self.class.attribute_map[key]]))
end
end
self
end
# Deserializes the data based on type
# @param string type Data type
# @param string value Value to be deserialized
# @return [Object] Deserialized data
def _deserialize(type, value)
case type.to_sym
when :Time
Time.parse(value)
when :Date
Date.parse(value)
when :String
value.to_s
when :Integer
value.to_i
when :Float
value.to_f
when :Boolean
if value.to_s =~ /\A(true|t|yes|y|1)\z/i
true
else
false
end
when :Object
# generic object (usually a Hash), return directly
value
when /\AArray<(?<inner_type>.+)>\z/
inner_type = Regexp.last_match[:inner_type]
value.map { |v| _deserialize(inner_type, v) }
when /\AHash<(?<k_type>.+?), (?<v_type>.+)>\z/
k_type = Regexp.last_match[:k_type]
v_type = Regexp.last_match[:v_type]
{}.tap do |hash|
value.each do |k, v|
hash[_deserialize(k_type, k)] = _deserialize(v_type, v)
end
end
when :Hash
value
else # model
# models (e.g. Pet) or oneOf
klass = FattureInCloud_Ruby_Sdk.const_get(type)
klass.respond_to?(:openapi_one_of) ? klass.build(value) : klass.build_from_hash(value)
end
end
# Returns the string representation of the object
# @return [String] String presentation of the object
def to_s
to_hash.to_s
end
# to_body is an alias to to_hash (backward compatibility)
# @return [Hash] Returns the object in the form of hash
def to_body
to_hash
end
# Returns the object in the form of hash
# @return [Hash] Returns the object in the form of hash
def to_hash
hash = {}
self.class.attribute_map.each_pair do |attr, param|
value = self.send(attr)
if value.nil?
is_nullable = self.class.openapi_nullable.include?(attr)
next if !is_nullable || (is_nullable && !instance_variable_defined?(:"@#{attr}"))
end
hash[param] = _to_hash(value)
end
hash
end
# Outputs non-array value in the form of hash
# For object, use to_hash. Otherwise, just return the value
# @param [Object] value Any valid value
# @return [Hash] Returns the value in the form of hash
def _to_hash(value)
if value.is_a?(Array)
value.compact.map { |v| _to_hash(v) }
elsif value.is_a?(Hash)
{}.tap do |hash|
value.each { |k, v| hash[k] = _to_hash(v) }
end
elsif value.respond_to? :to_hash
value.to_hash
else
value
end
end
end
end
| 30.95671 | 261 | 0.632779 |
03ee5b9e2a40f236437ea271c7d812dfe8a4ed2e | 4,440 | # Manage SELinux context of files.
#
# This code actually manages three pieces of data in the context.
#
# [root@delenn files]# ls -dZ /
# drwxr-xr-x root root system_u:object_r:root_t /
#
# The context of '/' here is 'system_u:object_r:root_t'. This is
# three seperate fields:
#
# system_u is the user context
# object_r is the role context
# root_t is the type context
#
# All three of these fields are returned in a single string by the
# output of the stat command, but set individually with the chcon
# command. This allows the user to specify a subset of the three
# values while leaving the others alone.
#
# See http://www.nsa.gov/selinux/ for complete docs on SELinux.
module Puppet
require 'puppet/util/selinux'
class SELFileContext < Puppet::Property
include Puppet::Util::SELinux
def retrieve
return :absent unless @resource.stat
context = self.get_selinux_current_context(@resource[:path])
parse_selinux_context(name, context)
end
def retrieve_default_context(property)
if @resource[:selinux_ignore_defaults] == :true
return nil
end
unless context = self.get_selinux_default_context(@resource[:path])
return nil
end
property_default = self.parse_selinux_context(property, context)
self.debug "Found #{property} default '#{property_default}' for #{@resource[:path]}" if not property_default.nil?
property_default
end
def insync?(value)
if not selinux_support?
debug("SELinux bindings not found. Ignoring parameter.")
true
elsif not selinux_label_support?(@resource[:path])
debug("SELinux not available for this filesystem. Ignoring parameter.")
true
else
super
end
end
def sync
self.set_selinux_context(@resource[:path], @should, name)
:file_changed
end
end
Puppet::Type.type(:file).newparam(:selinux_ignore_defaults) do
desc "If this is set then Puppet will not ask SELinux (via matchpathcon) to
supply defaults for the SELinux attributes (seluser, selrole,
seltype, and selrange). In general, you should leave this set at its
default and only set it to true when you need Puppet to not try to fix
SELinux labels automatically."
newvalues(:true, :false)
defaultto :false
end
Puppet::Type.type(:file).newproperty(:seluser, :parent => Puppet::SELFileContext) do
desc "What the SELinux user component of the context of the file should be.
Any valid SELinux user component is accepted. For example `user_u`.
If not specified it defaults to the value returned by matchpathcon for
the file, if any exists. Only valid on systems with SELinux support
enabled."
@event = :file_changed
defaultto { self.retrieve_default_context(:seluser) }
end
Puppet::Type.type(:file).newproperty(:selrole, :parent => Puppet::SELFileContext) do
desc "What the SELinux role component of the context of the file should be.
Any valid SELinux role component is accepted. For example `role_r`.
If not specified it defaults to the value returned by matchpathcon for
the file, if any exists. Only valid on systems with SELinux support
enabled."
@event = :file_changed
defaultto { self.retrieve_default_context(:selrole) }
end
Puppet::Type.type(:file).newproperty(:seltype, :parent => Puppet::SELFileContext) do
desc "What the SELinux type component of the context of the file should be.
Any valid SELinux type component is accepted. For example `tmp_t`.
If not specified it defaults to the value returned by matchpathcon for
the file, if any exists. Only valid on systems with SELinux support
enabled."
@event = :file_changed
defaultto { self.retrieve_default_context(:seltype) }
end
Puppet::Type.type(:file).newproperty(:selrange, :parent => Puppet::SELFileContext) do
desc "What the SELinux range component of the context of the file should be.
Any valid SELinux range component is accepted. For example `s0` or
`SystemHigh`. If not specified it defaults to the value returned by
matchpathcon for the file, if any exists. Only valid on systems with
SELinux support enabled and that have support for MCS (Multi-Category
Security)."
@event = :file_changed
defaultto { self.retrieve_default_context(:selrange) }
end
end
| 35.806452 | 119 | 0.707658 |
ab4c80224b2966966dff838aabf704a892ba4ff0 | 724 | # -*- encoding: utf-8 -*-
require File.expand_path('../lib/legacy_model_generator/version', __FILE__)
Gem::Specification.new do |gem|
gem.authors = ["Allan Davis"]
gem.email = ["[email protected]"]
gem.description = %q{Model from legancy database}
gem.summary = %q{Tools for creating model from legancy database.}
gem.homepage = ""
gem.files = `git ls-files`.split($\)
gem.executables = gem.files.grep(%r{^bin/}).map{ |f| File.basename(f) }
gem.test_files = gem.files.grep(%r{^(test|spec|features)/})
gem.name = "legacy_model_generator"
gem.require_paths = ["lib"]
gem.version = LegacyModelGenerator::VERSION
gem.add_dependency "rails"
end
| 38.105263 | 75 | 0.643646 |
39145a0ddc14402b5ae313cc7e8bde8bb5d1cba5 | 10,987 | require 'tempfile'
module Acceptance; end
module Acceptance::Helpers; end
# Methods to support 'simp kv' testing. Data is generated by the kv_test
# Puppet module.
module Acceptance::Helpers::KvTestData
# @return Hash of initial keys to be pre-seeded in the 'default' and 'custom'
# backends via the kv_test module
#
# - Keys cannot have Binary values. Those are handled separately
# (see initial_binary_key_info()).
# - Hash will be used to set kv_test::params::key_info in hiera
# - Primary key is the name of the simpkv backend.
# - simpkv backends are configured in hiera via simpkv::options
# - kv_test module will automatically generate metadata with an 'id' string
# attribute that includes the backend name, either 'global' or the Puppet
# environment and the key. For example,
# 'default production boolean'
# 'dev global global_complex/hash'
#
def initial_key_info
{
'default' => {
'keys' => {
'boolean' => true,
'integer' => 123,
'float' => 4.567,
'string' => 'string1',
'complex/array_strings' => [ 'string2', 'string3' ],
'complex/array_integers' => [ 8, 9, 10 ],
'complex/hash' => {
'key1' => 'string4',
'key2' => 11,
'key3' => false,
'key4' => {
'nkey1' => 'string5',
'nkey2' => true,
'nkey3' => 12
}
}
},
'global_keys' => {
'global_boolean' => true,
'global_integer' => 123,
'global_float' => 4.567,
'global_string' => 'string1',
'global_complex/array_strings' => [ 'string2', 'string3' ],
'global_complex/array_integers' => [ 8, 9, 10 ],
'global_complex/hash' => {
'key1' => 'string4',
'key2' => 11,
'key3' => false,
'key4' => {
'nkey1' => 'string5',
'nkey2' => true,
'nkey3' => 12
}
}
}
},
'custom' => {
'keys' => {
'boolean' => true,
'integer' => 123,
'float' => 4.567,
'string' => 'string1',
'complex/array_strings' => [ 'string2', 'string3' ],
'complex/array_integers' => [ 8, 9, 10 ],
'complex/hash' => {
'key1' => 'string4',
'key2' => 11,
'key3' => false,
'key4' => {
'nkey1' => 'string5',
'nkey2' => true,
'nkey3' => 12
}
}
},
'global_keys' => {
'global_boolean' => true,
'global_integer' => 123,
'global_float' => 4.567,
'global_string' => 'string1',
'global_complex/array_strings' => [ 'string2', 'string3' ],
'global_complex/array_integers' => [ 8, 9, 10 ],
'global_complex/hash' => {
'key1' => 'string4',
'key2' => 11,
'key3' => false,
'key4' => {
'nkey1' => 'string5',
'nkey2' => true,
'nkey3' => 12
}
}
}
}
}
end
# @return Hash of initial keys with Binary values to be pre-seeded in the
# 'default' and 'custom' backends via the kv_test module
#
# - Hash will be used to set kv_test::params::binary_key_info in hiera
# - Primary key is the name of the simpkv backend.
# - simpkv backends are configured in hiera via simpkv::options
  # - The value of each key is either a reference to a file in a Puppet
  #   module or a fully qualified path to a file on the Puppet server.
# - kv_test module will automatically generate metadata with an 'id' string
# attribute that includes the backend name, either 'global' or the Puppet
# environment and the key. For example,
# 'default production boolean'
# 'dev global global_complex/hash'
#
def initial_binary_key_info
{
'default' => {
'keys' => {
'complex/binary' => 'kv_test/test_krb5.keytab'
},
'global_keys' => {
'global_complex/binary' => 'kv_test/test_krb5.keytab'
}
},
'custom' => {
'keys' => {
'complex/binary' => 'kv_test/test_krb5.keytab'
},
'global_keys' => {
'global_complex/binary' => 'kv_test/test_krb5.keytab'
}
}
}
end
# @return the Base64-encoded value of the binary data provided by the
# kv_test module
def binary_base64
'BQIAAABXAAIABFRFU1QABGhvc3QAFmZha2VfaG9zdDEuc29tZS5kb21haW4AAAABXXfabAEAEg'\
'AgaDM0Qy9kh4YJ5F9e9Z0sB9HNY+ejsIUn8nTBbT5xsrYAAAABAAAARwACAARURVNUAARob3N0'\
'ABZmYWtlX2hvc3QxLnNvbWUuZG9tYWluAAAAAV132mwBABEAEDZg19gW99qgG+DQYeyffFMAAAAB'
end
def brief_kv_list_results(global)
prefix = global ? 'global_' : ''
{
'/' => {
'keys' => [
"#{prefix}boolean",
"#{prefix}float",
"#{prefix}integer",
"#{prefix}string",
],
'folders' => [ "#{prefix}complex" ]
},
"#{prefix}complex" => {
'keys' => [
'array_integers',
'array_strings',
'binary',
'hash'
],
'folders' => []
}
}
end
def detailed_kv_list_results(id_prefix, global)
prefix = global ? 'global_' : ''
{
'/' => {
'keys' => {
"#{prefix}boolean" => {
'value' => true,
'metadata' => { 'id' => "#{id_prefix} #{prefix}boolean" }
},
"#{prefix}float" => {
'value' => 4.567,
'metadata' => { 'id' => "#{id_prefix} #{prefix}float" }
},
"#{prefix}integer" => {
'value' => 123,
'metadata' => { 'id' => "#{id_prefix} #{prefix}integer" }
},
"#{prefix}string" => {
'value' => 'string1',
'metadata' => { 'id' => "#{id_prefix} #{prefix}string" }
}
},
'folders' => [ "#{prefix}complex" ]
},
"#{prefix}complex" => {
'keys' => {
'array_integers' => {
'value' => [ 8, 9, 10 ],
'metadata' => { 'id' => "#{id_prefix} #{prefix}complex/array_integers" }
},
'array_strings' => {
'value' => [ 'string2', 'string3' ],
'metadata' => { 'id' => "#{id_prefix} #{prefix}complex/array_strings" }
},
'binary' => {
'value' => binary_base64,
'metadata' => { 'id' => "#{id_prefix} #{prefix}complex/binary" },
'encoding' => 'base64',
'original_encoding' => 'ASCII-8BIT'
},
'hash' => {
'value' => {
'key1' => 'string4',
'key2' => 11,
'key3' => false,
'key4' => { 'nkey1' => 'string5', 'nkey2' => true, 'nkey3' => 12 }
},
'metadata' => { 'id' => "#{id_prefix} #{prefix}complex/hash" }
}
},
'folders' => []
}
}
end
def keys_info(folder, detailed_list)
keys = {}
prefix = (folder == '/') ? '' : "#{folder}/"
detailed_list[folder]['keys'].each do |key,info|
keys["#{prefix}#{key}"] = info
end
keys
end
# change the value and metadata for each key info in a list result
def change_key_info(current_list)
updated_list = Marshal.load(Marshal.dump(current_list))
updated_list.each do |folder,folder_info|
folder_info['keys'].each do |key,key_info|
if key_info['value'].is_a?(Array) || key_info['value'].is_a?(String)
key_info['value'] = key_info['value'] + key_info['value']
end
if key_info['value'].is_a?(TrueClass) || key_info['value'].is_a?(FalseClass)
key_info['value'] = ! key_info['value']
end
if key_info['value'].is_a?(Hash)
key_info['value']['new_key'] = 'new string elem'
end
if key_info['value'].is_a?(Numeric)
key_info['value'] *= 10
end
key_info['metadata']['version'] = 2
end
end
updated_list
end
def create_key_info(regular_keys, binary_keys, env, backend )
keys = {}
list = { }
location = env.nil? ? 'globals' : File.join('environments', env)
regular_keys.each do |key|
key_path = key
key_dir = File.dirname(key_path)
key_dir = '/' if key_dir == '.'
unless list.key?(key_dir)
list[key_dir] = { 'keys' => {}, 'folders' => []}
end
info = {
'value' => "#{key_path} value",
'metadata' => { 'id' => "#{backend} #{location} #{key_path}" }
}
keys[key_path] = info
list[key_dir]['keys'][File.basename(key_path)] = info
end
sub_list = { 'keys' => {}, 'folders' => [] }
binary_keys.each do |key|
key_path = key
key_dir = File.dirname(key_path)
key_dir = '/' if key_dir == '.'
unless list.key?(key_dir)
list[key_dir] = { 'keys' => {}, 'folders' => []}
end
info = {
'value' => binary_base64,
'encoding' => 'base64',
'original_encoding' => 'ASCII-8BIT',
'metadata' => {
'id' => "#{backend} #{location} #{key_path}"
}
}
keys[key_path] = info
list[key_dir]['keys'][File.basename(key_path)] = info
end
[ keys, list ]
end
def run_and_load_json(host, cmd, json_file)
on(host, cmd)
JSON.load( on(host, "cat #{json_file}").stdout )
end
def verify_files(host, keys, root_path)
keys.each do |key,info|
file = File.join(root_path, key)
if info.key?('encoding')
puts "Verifying '#{key}' value"
actual_binary_file = "#{file}.bin"
expected_binary_file = "/root/#{File.basename(key)}.bin"
Tempfile.open 'kv_test_data' do |tempfile|
          File.open(tempfile.path, 'w') do |f| # `f` avoids shadowing the outer `file` local
            f.write(Base64.strict_decode64(info['value']))
end
copy_to(host, tempfile.path, expected_binary_file)
end
puts "Verifying '#{key}' metadata"
meta_file = "#{file}.meta"
actual_meta = JSON.load( on(host, "cat #{meta_file}").stdout )
expect( actual_meta ).to eq(info['metadata'])
else
puts "Verifying '#{key}' value and metadata"
actual = JSON.load( on(host, "cat #{file}").stdout )
expect( actual ).to eq(info)
end
end
end
end
| 31.846377 | 86 | 0.48612 |
185999c4fb3a9d99bd8cd204a926c260909567e1 | 3,187 | # frozen_string_literal: true
require 'json'
require 'active_support/core_ext/hash/indifferent_access'
require 'active_support/core_ext/hash/except'
require_relative 'active_records/base'
require_relative 'active_records/matches'
require_relative 'active_records/rankings'
require_relative 'active_records/rounds'
require_relative 'active_records/teams'
class ScrapCbfRecord
# This module uses Active Record module to save data on database.
class ActiveRecord
class << self
def save(records)
new(records).save
true
end
end
# The argument records is a hash (json or not) with the following look :
# - hash[:championship] the championship for a specific year and divison
# - hash[:matches] the matches for the specific championship
# - hash[:rankings] the rankings for the specific championship
# - hash[:rounds] the rounds for the specific championship
# - hash[:teams] the teams that participated on the specific championship
#
# @param [records] hash or json returned from ScrapCbf gem
# @return [nil]
def initialize(records)
records = parse_json!(records) if records.is_a?(String)
raise ::ArgumentError, invalid_type_message unless records.is_a?(Hash)
@records = records.with_indifferent_access
validate_record_key_presence!(@records)
@championship = @records[:championship]
@matches = @records[:matches]
@rankings = @records[:rankings]
@rounds = @records[:rounds]
@teams = @records[:teams]
end
# Save records to the database.
# Note: Because of database relationships and dependencies between records
# there maybe exist a saving order.
# - Teams must be save before Rankings and Match.
# - Rounds must be save before Matches
#
# @raise [ActiveRecordValidationError] in case of failing while saving
#
# @return [Boolean] true in case of success
def save
save_teams(@teams)
save_rankings(@rankings, @championship)
save_rounds(@rounds, @championship)
save_matches(@matches, @championship)
true
end
private
def save_teams(teams)
Teams.new(teams).create_unless_found
end
def save_rankings(rankings, championship)
Rankings.new(rankings).create_or_update(championship)
end
def save_rounds(rounds, championship)
Rounds.new(rounds).create_unless_found(championship)
end
def save_matches(matches, championship)
Matches.new(matches).create_or_update(championship)
end
def invalid_type_message
'must be a Hash or Json of a Hash'
end
def parse_json!(records)
JSON.parse(records)
rescue JSON::ParserError => e
raise JsonDecodeError, e
end
def validate_record_key_presence!(records)
raise MissingKeyError, 'championship' unless records.key?(:championship)
raise MissingKeyError, 'matches' unless records.key?(:matches)
raise MissingKeyError, 'rankings' unless records.key?(:rankings)
raise MissingKeyError, 'rounds' unless records.key?(:rounds)
raise MissingKeyError, 'teams' unless records.key?(:teams)
true
end
end
end
| 30.066038 | 78 | 0.706934 |
5d22bbfb32e5c45a94da29922188eeeee72a061f | 559 | # encoding: utf-8
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
module Azure::MachineLearningServices::Mgmt::V2019_05_01
module Models
#
# Defines values for ComputeType
#
module ComputeType
AKS = "AKS"
AmlCompute = "AmlCompute"
DataFactory = "DataFactory"
VirtualMachine = "VirtualMachine"
HDInsight = "HDInsight"
Databricks = "Databricks"
DataLakeAnalytics = "DataLakeAnalytics"
end
end
end
| 25.409091 | 70 | 0.692308 |
ede33fbd6bac994a7422179e371ca7b5e49e5c67 | 1,043 | Pod::Spec.new do |s|
s.name = "ObjectivePGP"
s.version = "0.13.0"
s.summary = "OpenPGP for iOS and macOS"
s.description = "Native OpenPGP (RFC 4880) implementation for iOS and macOS."
s.homepage = "http://objectivepgp.com"
s.license = { :type => 'BSD for non-commercial use', :file => 'LICENSE.txt' }
s.source = { :git => "https://github.com/dsanghan/ObjectivePGP.git" }
s.authors = {'Marcin Krzyzanowski' => '[email protected]'}
s.social_media_url = "https://twitter.com/krzyzanowskim"
s.ios.deployment_target = '8.0'
s.ios.header_dir = 'ObjectivePGP'
s.osx.deployment_target = '10.9'
s.osx.header_dir = 'ObjectivePGP'
s.source_files = 'ObjectivePGP/*.{h,m}', 'ObjectivePGP/CryptoBox/*.{h,m}'
s.public_header_files = 'ObjectivePGP/*.h', 'ObjectivePGP/CryptoBox/*.h'
s.dependency 'OpenSSL-Universal'
s.requires_arc = true
s.pod_target_xcconfig = { 'OTHER_LDFLAGS' => '-lObjC' }
s.libraries = 'z', 'bz2'
s.requires_arc = true
end
| 34.766667 | 84 | 0.639501 |
218f2e99f8d5c7771d532296804cac466772379c | 342 | require './spec/spec_helper'
describe Player do
subject { Player.new 1, 'TestUser' }
let(:company) { Company.new subject, *(['BSE'].concat Company::COMPANIES['BSE']) }
describe '#value' do
it 'should add up everything' do
subject.companies << company
expect(subject.value).to eq(30 + company.value)
end
end
end
| 22.8 | 84 | 0.660819 |
1d39b123f06b5fc59b4572e367d680853989db45 | 194 | class UsersController < ApplicationController
def show
@user = User.find(params[:id])
@images = @user.images
end
def index
@users = User.all.order(username: :desc)
end
end
| 16.166667 | 45 | 0.670103 |
116b13617205a8626a756b02d60a56437514a967 | 4,327 | # -*- coding: utf-8 -*- #
# frozen_string_literal: true
# TODO how are we going to handle soft/hard contrast?
module Rouge
module Themes
# Based on https://github.com/morhetz/gruvbox, with help from
# https://github.com/daveyarwood/gruvbox-pygments
class Gruvbox < CSSTheme
name 'gruvbox'
# global Gruvbox colours {{{
C_dark0_hard = '#1d2021'
C_dark0 ='#282828'
C_dark0_soft = '#32302f'
C_dark1 = '#3c3836'
C_dark2 = '#504945'
C_dark3 = '#665c54'
C_dark4 = '#7c6f64'
C_dark4_256 = '#7c6f64'
C_gray_245 = '#928374'
C_gray_244 = '#928374'
C_light0_hard = '#f9f5d7'
C_light0 = '#fbf1c7'
C_light0_soft = '#f2e5bc'
C_light1 = '#ebdbb2'
C_light2 = '#d5c4a1'
C_light3 = '#bdae93'
C_light4 = '#a89984'
C_light4_256 = '#a89984'
C_bright_red = '#fb4934'
C_bright_green = '#b8bb26'
C_bright_yellow = '#fabd2f'
C_bright_blue = '#83a598'
C_bright_purple = '#d3869b'
C_bright_aqua = '#8ec07c'
C_bright_orange = '#fe8019'
C_neutral_red = '#cc241d'
C_neutral_green = '#98971a'
C_neutral_yellow = '#d79921'
C_neutral_blue = '#458588'
C_neutral_purple = '#b16286'
C_neutral_aqua = '#689d6a'
C_neutral_orange = '#d65d0e'
C_faded_red = '#9d0006'
C_faded_green = '#79740e'
C_faded_yellow = '#b57614'
C_faded_blue = '#076678'
C_faded_purple = '#8f3f71'
C_faded_aqua = '#427b58'
C_faded_orange = '#af3a03'
# }}}
extend HasModes
def self.light!
mode :dark # indicate that there is a dark variant
mode! :light
end
def self.dark!
mode :light # indicate that there is a light variant
mode! :dark
end
def self.make_dark!
palette bg0: C_dark0
palette bg1: C_dark1
palette bg2: C_dark2
palette bg3: C_dark3
palette bg4: C_dark4
palette gray: C_gray_245
palette fg0: C_light0
palette fg1: C_light1
palette fg2: C_light2
palette fg3: C_light3
palette fg4: C_light4
palette fg4_256: C_light4_256
palette red: C_bright_red
palette green: C_bright_green
palette yellow: C_bright_yellow
palette blue: C_bright_blue
palette purple: C_bright_purple
palette aqua: C_bright_aqua
palette orange: C_bright_orange
end
def self.make_light!
palette bg0: C_light0
palette bg1: C_light1
palette bg2: C_light2
palette bg3: C_light3
palette bg4: C_light4
palette gray: C_gray_244
palette fg0: C_dark0
palette fg1: C_dark1
palette fg2: C_dark2
palette fg3: C_dark3
palette fg4: C_dark4
palette fg4_256: C_dark4_256
palette red: C_faded_red
palette green: C_faded_green
palette yellow: C_faded_yellow
palette blue: C_faded_blue
palette purple: C_faded_purple
palette aqua: C_faded_aqua
palette orange: C_faded_orange
end
dark!
mode :light
style Text, :fg => :fg0, :bg => :bg0
style Error, :fg => :red, :bg => :bg0, :bold => true
style Comment, :fg => :gray, :italic => true
style Comment::Preproc, :fg => :aqua
style Name::Tag, :fg => :red
style Operator,
Punctuation, :fg => :fg0
style Generic::Inserted, :fg => :green, :bg => :bg0
style Generic::Deleted, :fg => :red, :bg => :bg0
style Generic::Heading, :fg => :green, :bold => true
style Keyword, :fg => :red
style Keyword::Constant, :fg => :purple
style Keyword::Type, :fg => :yellow
style Keyword::Declaration, :fg => :orange
style Literal::String,
Literal::String::Interpol,
Literal::String::Regex, :fg => :green, :italic => true
style Literal::String::Affix, :fg => :red
style Literal::String::Escape, :fg => :orange
style Name::Namespace,
Name::Class, :fg => :aqua
style Name::Constant, :fg => :purple
style Name::Attribute, :fg => :green
style Literal::Number, :fg => :purple
style Literal::String::Symbol, :fg => :blue
end
end
end
| 25.304094 | 66 | 0.587243 |
33865bd454ba6cb0a088016ea2d824addb064805 | 3,613 | Rails.application.configure do
# Settings specified here will take precedence over those in config/application.rb.
# Code is not reloaded between requests.
config.cache_classes = true
# Eager load code on boot. This eager loads most of Rails and
# your application in memory, allowing both threaded web servers
# and those relying on copy on write to perform better.
# Rake tasks automatically ignore this option for performance.
config.eager_load = true
# Full error reports are disabled and caching is turned on.
config.consider_all_requests_local = false
config.action_controller.perform_caching = true
# Disable serving static files from the `/public` folder by default since
# Apache or NGINX already handles this.
config.public_file_server.enabled = ENV['RAILS_SERVE_STATIC_FILES'].present?
# Compress JavaScripts and CSS.
config.assets.js_compressor = :uglifier
# config.assets.css_compressor = :sass
# Do not fallback to assets pipeline if a precompiled asset is missed.
config.assets.compile = false
# Asset digests allow you to set far-future HTTP expiration dates on all assets,
# yet still be able to expire them through the digest params.
config.assets.digest = true
# `config.assets.precompile` and `config.assets.version` have moved to config/initializers/assets.rb
# Enable serving of images, stylesheets, and JavaScripts from an asset server.
# config.action_controller.asset_host = 'http://assets.example.com'
# Specifies the header that your server uses for sending files.
# config.action_dispatch.x_sendfile_header = 'X-Sendfile' # for Apache
# config.action_dispatch.x_sendfile_header = 'X-Accel-Redirect' # for NGINX
# Action Cable endpoint configuration
# config.action_cable.url = 'wss://example.com/cable'
# config.action_cable.allowed_request_origins = [ 'http://example.com', /http:\/\/example.*/ ]
# Force all access to the app over SSL, use Strict-Transport-Security, and use secure cookies.
# config.force_ssl = true
# Use the lowest log level to ensure availability of diagnostic information
# when problems arise.
config.log_level = :debug
# Prepend all log lines with the following tags.
config.log_tags = [ :request_id ]
# Use a different logger for distributed setups.
# require 'syslog/logger'
# config.logger = ActiveSupport::TaggedLogging.new(Syslog::Logger.new 'app-name')
if ENV["RAILS_LOG_TO_STDOUT"].present?
config.logger = ActiveSupport::TaggedLogging.new(Logger.new(STDOUT))
end
# Use a different cache store in production.
# config.cache_store = :mem_cache_store
# Use a real queuing backend for Active Job (and separate queues per environment)
# config.active_job.queue_adapter = :resque
# config.active_job.queue_name_prefix = "server_#{Rails.env}"
config.action_mailer.perform_caching = false
# Ignore bad email addresses and do not raise email delivery errors.
# Set this to true and configure the email server for immediate delivery to raise delivery errors.
# config.action_mailer.raise_delivery_errors = false
# Enable locale fallbacks for I18n (makes lookups for any locale fall back to
# the I18n.default_locale when a translation cannot be found).
config.i18n.fallbacks = true
# Send deprecation notices to registered listeners.
config.active_support.deprecation = :notify
# Use default logging formatter so that PID and timestamp are not suppressed.
config.log_formatter = ::Logger::Formatter.new
# Do not dump schema after migrations.
config.active_record.dump_schema_after_migration = false
end
| 41.056818 | 102 | 0.760864 |
1a97f32e8b10929348ee5a5ab424d1cebd30fcaf | 9,173 | #--
# Copyright (c) 2012+ Damjan Rems
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#++
module DrgcmsFormFields
###########################################################################
# Implementation of select DRG CMS form field.
#
# ===Form options:
# * +name:+ field name (required)
# * +type:+ select (required)
# * +choices:+ Values for choices separated by comma. Values can also be specified like description:value.
# In the example description will be shown to user, but value will be saved to document.
# choices: 'OK:0,Ready:1,Error:2'
# choices: Ruby,Pyton,PHP
# * +eval:+ Choices will be provided by evaluating expression
# * eval: dc_choices4('model_name','description_field_name','_id'); dc_choices4 helper will provide data for select field.
# * eval: ModelName.choices4_field; ModelName class will define method choices4_field which
# will provide data for select field.
# * collection_name.search_field_name.method_name; When searching is more complex custom search
# method may be defined in CollectionName model which will provide result set for search.
  #   * If neither choices nor eval is defined, choices will be provided by translation helpers. For example:
  #     if a collection has a field named status, choices for the field may be provided by the
  #     en.helpers.model_name.choices4_status entry of the English translation. English is of course the
  #     default translation. If you provide translations in your local language, the select choices will be localized.
# en.helpers.model_name.choices4_status: 'OK:0,Ready:1,Error:2'
# sl.helpers.model_name.choices4_status: 'V redu:0,Pripravljen:1,Napaka:2'
  # * +depend:+ Select options may depend on a value in some other field. If the depend option is specified,
  #   then choices must be provided by a class method and defined in the eval option.
# * +html:+ html options which apply to select field (optional)
#
# Form example:
# 30:
# name: type
# type: select
# 40:
# name: parent
# type: select
# eval: DcCategory.values_for_parent
# html:
# include_blank: true
# 50:
# name: company
# type: select
# choices: Audi,BMW,Mercedes
# or
# choices: helpers.label.model.choices4_field
# 60:
# name: type
# type: select
# eval: Cars.choices4_type
# depend: company
###########################################################################
class Select < DrgcmsField
###########################################################################
# Choices are defined in helper as:
# helper.label.table_name.choices_for_fieldname or
# choices4_tablename_fieldname
###########################################################################
def choices_in_helper(helper = nil)
helper ||= "helpers.label.#{@form['table']}.choices4_#{@yaml['name']}"
c = t(helper)
if c.match( 'translation missing' )
helper = "choices_for_#{@form['table']}_#{@yaml['name']}"
return "Error. #{helper} not defined" if c.match( 'translation missing' )
end
c
end
###########################################################################
# Choices are defined by evaluating an expression. This is most common class
# method defined in a class. eg. SomeClass.get_choices4
###########################################################################
def choices_in_eval(e)
e.strip!
if @yaml['depend'].nil?
method = e.split(/\ |\(/).first
return eval(e) if respond_to?(method) # id method defined here
return eval('@parent.' + e) if @parent.respond_to?(method) # is method defined in helpers
# eval whatever it is there
eval e
else
# add event listener to depend field
@js << %(
$(document).ready(function() {
$('#record_#{@yaml['depend']}').change( function(e) { update_select_depend('record_#{@yaml['name']}', 'record_#{@yaml['depend']}','#{e}');});
$('#_record_#{@yaml['depend']}').change( function(e) { update_select_depend('record_#{@yaml['name']}', '_record_#{@yaml['depend']}','#{e}');});
});
)
# depend field might be virtual field. It's value should be set in params
depend_value = @yaml['depend'][0] == '_' ? @parent.params["p_#{@yaml['depend']}"] : @record[@yaml['depend']]
e << " '#{depend_value}'"
eval e
end
end
###########################################################################
# Create choices array for select field.
###########################################################################
def get_choices
begin
choices = case
when @yaml['eval'] then
choices_in_eval(@yaml['eval'])
when @yaml['choices'] then
@yaml['choices'].match('helpers.') ? choices_in_helper(@yaml['choices']) : @yaml['choices']
else
choices_in_helper()
end
return choices unless choices.class == String
choices.chomp.split(',').map { |e| e.match(':') ? e.split(':') : e }
rescue Exception => e
Rails.logger.debug "\nError in select eval. #{e.message}\n"
Rails.logger.debug(e.backtrace.join($/)) if Rails.env.development?
['error'] # return empty array when error occures
end
end
###########################################################################
# Will add code to view more data about selected option in a window
###########################################################################
def add_view_code
return '' if (data = @record.send(@yaml['name'])).blank?
table, form_name = @yaml['view'].split(/\ |\,/).delete_if { |e| e.blank? }
url = @parent.url_for(controller: :cmsedit, id: data, action: :edit, table: table, form_name: form_name, readonly: true, window_close: 1 )
icon = @parent.fa_icon('eye')
%(<span class="dc-window-open" data-url="#{url}">#{icon}</span>)
end
###########################################################################
# Return value when readonly is required
###########################################################################
def ro_standard
value = @record.respond_to?(@yaml['name']) ? @record.send(@yaml['name']) : nil
return self if value.blank?
html = ''
choices = get_choices()
if value.class == Array # multiple choices
value.each do |element|
choices.each do |choice|
if choice.to_s == element.to_s
html << '<br>' if html.size > 0
html << "#{element.to_s}"
end
end
end
else
choices.each do |choice|
if choice.class == Array
(html = choice.first; break) if choice.last.to_s == value.to_s
else
(html = choice; break) if choice.to_s == value.to_s
end
end
html << add_view_code if @yaml['view']
end
super(html)
end
###########################################################################
# Render select field html code
###########################################################################
def render
return ro_standard if @readonly
set_initial_value('html','selected')
# separate options and html part
html_part = {}
@yaml['html'].symbolize_keys!
%i(class id style required).each { |sym| html_part[sym] = @yaml['html'].delete(sym) if html_part[sym]}
html_part[:multiple] = true if @yaml['multiple']
record = record_text_for(@yaml['name'])
if html_part[:multiple]
@html << @parent.select(record, @yaml['name'], get_choices, @yaml['html'], html_part)
@js << "$('##{record}_#{@yaml['name']}').selectMultiple();"
else
@html << @parent.select(record, @yaml['name'], get_choices, @yaml['html'], html_part)
# add code for view more data
@html << add_view_code() if @yaml['view']
end
self
end
###########################################################################
# Return value.
###########################################################################
def self.get_data(params, name)
if params['record'][name].class == Array
params['record'][name].delete_if {|e| e.blank? }
return if params['record'][name].size == 0
# convert to BSON objects
is_id = BSON::ObjectId.legal?(params['record'][name].first)
return params['record'][name].map{ |e| BSON::ObjectId.from_string(e) } if is_id
end
params['record'][name]
end
end
end
| 40.588496 | 145 | 0.584651 |
e270274193ef3a5434b57414a51d1d201d00aa80 | 2,059 | shared_examples "date_time_for" do |field|
it "returns date" do
subject.public_send("#{field}=", Time.zone.local(2012, 9, 6, 1, 30))
expect(subject.public_send("#{field}_date")).to eq(Date.civil(2012, 9, 6))
end
it "returns seconds of beginning of day" do
subject.public_send("#{field}=", Time.zone.local(2012, 9, 6, 1, 30))
expect(subject.public_send("formatted_#{field}_time")).to eq("01:30")
expect(subject.public_send("#{field}_day")).to eq("06")
expect(subject.public_send("#{field}_month")).to eq("09")
expect(subject.public_send("#{field}_year")).to eq("2012")
end
it "returns Time" do
subject.public_send "#{field}_day=", "06"
subject.public_send "#{field}_month=", "09"
subject.public_send "#{field}_year=", "2012"
subject.public_send "formatted_#{field}_time=", "01:30"
expect(subject.public_send(field)).to eq(Time.zone.local(2012, 9, 6, 1, 30))
end
it "resets value" do
subject.public_send("#{field}=", Time.zone.local(2012, 9, 6, 1, 30))
subject.public_send("#{field}_day=", "")
subject.public_send("#{field}_month=", "")
subject.public_send("#{field}_year=", "")
subject.public_send("formatted_#{field}_time=", "")
expect(subject.public_send(field)).to be_nil
end
it "updates date" do
subject.public_send("#{field}=", Time.zone.local(2012, 9, 6, 1, 30))
subject.public_send("#{field}_day=", "05")
subject.public_send("#{field}_month=", "09")
subject.public_send("#{field}_year=", "2012")
expect(subject.public_send(field)).to eq(Time.zone.local(2012, 9, 5, 1, 30))
end
it "updates time twice" do
subject.public_send("#{field}=", Time.zone.local(2012, 9, 6, 1, 30))
subject.public_send("#{field}_day=", "05")
subject.public_send("#{field}_month=", "09")
subject.public_send("#{field}_year=", "2012")
subject.public_send "formatted_#{field}_time=", "01:30"
subject.public_send "formatted_#{field}_time=", "03:30"
expect(subject.public_send(field)).to eq(Time.zone.local(2012, 9, 5, 3, 30))
end
end
| 41.18 | 80 | 0.654687 |
5d2c9861e269b20a99ee34e216127f57a9d9533f | 197 | # typed: false
class F
sig {void}
def method1
end
def method2
# ^^^ error: Hint: this "def" token might not be properly closed
puts 'hello'
end # error: unexpected token "end of file"
| 17.909091 | 64 | 0.670051 |
38e290f2186bdcf012c35661066b56b414fff47b | 977 | require "test_helper"
class ConversationPresenterTest < ActiveSupport::TestCase
context "#optimized_present_all" do
should "present the same content as #unoptimized_present_all" do
ability = Class.new do
def can?(*args)
true
end
end.new
user = User.first
conversations = [
FactoryGirl.create(:conversation),
FactoryGirl.create(:conversation),
FactoryGirl.create(:conversation)
]
conversations[1].read_by! user
conversations[2].set_signal_strength_by! user, 2
presenter = Houston::Feedback::ConversationPresenter.new(ability, :ignore)
conversations = Houston::Feedback::Conversation.with_flags_for(user)
expected_results = JSON.pretty_generate presenter.unoptimized_present_all(conversations)
actual_results = JSON.pretty_generate presenter.optimized_present_all(conversations)
assert_equal expected_results, actual_results
end
end
end
| 28.735294 | 94 | 0.714432 |
0179f41ec6a1d78e06c8dd1cc4435016804c2b0e | 4,083 | Rails.application.configure do
# Verifies that versions and hashed value of the package contents in the project's package.json
config.webpacker.check_yarn_integrity = false
# Settings specified here will take precedence over those in config/application.rb.
# Code is not reloaded between requests.
config.cache_classes = true
# Eager load code on boot. This eager loads most of Rails and
# your application in memory, allowing both threaded web servers
# and those relying on copy on write to perform better.
# Rake tasks automatically ignore this option for performance.
config.eager_load = true
# Full error reports are disabled and caching is turned on.
config.consider_all_requests_local = false
config.action_controller.perform_caching = true
# Ensures that a master key has been made available in either ENV["RAILS_MASTER_KEY"]
# or in config/master.key. This key is used to decrypt credentials (and other encrypted files).
# config.require_master_key = true
# Disable serving static files from the `/public` folder by default since
# Apache or NGINX already handles this.
config.public_file_server.enabled = ENV['RAILS_SERVE_STATIC_FILES'].present?
# Compress JavaScripts and CSS.
# config.assets.js_compressor = :uglifier
# config.assets.css_compressor = :sass
# Do not fallback to assets pipeline if a precompiled asset is missed.
config.assets.compile = false
# `config.assets.precompile` and `config.assets.version` have moved to config/initializers/assets.rb
# Enable serving of images, stylesheets, and JavaScripts from an asset server.
# config.action_controller.asset_host = 'http://assets.example.com'
# Specifies the header that your server uses for sending files.
# config.action_dispatch.x_sendfile_header = 'X-Sendfile' # for Apache
# config.action_dispatch.x_sendfile_header = 'X-Accel-Redirect' # for NGINX
# Store uploaded files on the local file system (see config/storage.yml for options)
config.active_storage.service = :local
# Mount Action Cable outside main process or domain
# config.action_cable.mount_path = nil
# config.action_cable.url = 'wss://example.com/cable'
# config.action_cable.allowed_request_origins = [ 'http://example.com', /http:\/\/example.*/ ]
# Force all access to the app over SSL, use Strict-Transport-Security, and use secure cookies.
# config.force_ssl = true
# Use the lowest log level to ensure availability of diagnostic information
# when problems arise.
config.log_level = :debug
# Prepend all log lines with the following tags.
config.log_tags = [ :request_id ]
# Use a different cache store in production.
# config.cache_store = :mem_cache_store
# Use a real queuing backend for Active Job (and separate queues per environment)
# config.active_job.queue_adapter = :resque
# config.active_job.queue_name_prefix = "dummy_#{Rails.env}"
config.action_mailer.perform_caching = false
# Ignore bad email addresses and do not raise email delivery errors.
# Set this to true and configure the email server for immediate delivery to raise delivery errors.
# config.action_mailer.raise_delivery_errors = false
# Enable locale fallbacks for I18n (makes lookups for any locale fall back to
# the I18n.default_locale when a translation cannot be found).
config.i18n.fallbacks = true
# Send deprecation notices to registered listeners.
config.active_support.deprecation = :notify
# Use default logging formatter so that PID and timestamp are not suppressed.
config.log_formatter = ::Logger::Formatter.new
# Use a different logger for distributed setups.
# require 'syslog/logger'
# config.logger = ActiveSupport::TaggedLogging.new(Syslog::Logger.new 'app-name')
if ENV["RAILS_LOG_TO_STDOUT"].present?
logger = ActiveSupport::Logger.new(STDOUT)
logger.formatter = config.log_formatter
config.logger = ActiveSupport::TaggedLogging.new(logger)
end
# Do not dump schema after migrations.
config.active_record.dump_schema_after_migration = false
end
| 42.092784 | 102 | 0.759491 |
e862076aa5475d64232accd580876c8c69667ab0 | 3,217 | module Natalie
class Compiler
class MultipleAssignment
def initialize(exp, value_name)
@exp = exp
@value_name = value_name
end
attr_reader :exp, :value_name
def generate
(_, names, val) = exp
names = names[1..-1]
val = val.last if val.sexp_type == :to_ary
exp.new(:block,
s(:declare, value_name, s(:to_ary, :env, val, :false)),
*paths(exp, value_name))
end
private
def paths(exp, value_name)
masgn_paths(exp).map do |name, path_details|
path = path_details[:path]
if name.is_a?(Sexp)
if name.sexp_type == :splat
if name.size == 1 # nameless splat
s(:block)
else
value = s(:array_value_by_path, :env, value_name, s(:nil), :true, path_details[:offset_from_end], path.size, *path)
masgn_set(name.last, value)
end
else
default_value = name.size == 3 ? name.pop : s(:nil)
value = s(:array_value_by_path, :env, value_name, default_value, :false, 0, path.size, *path)
masgn_set(name, value)
end
else
raise "unknown masgn type: #{name.inspect} (#{exp.file}\##{exp.line})"
end
end
end
# Ruby blows the stack at around this number, so let's limit Natalie as well.
# Anything over a few dozen is pretty crazy, actually.
MAX_MASGN_PATH_INDEX = 131_044
def masgn_paths(exp, prefix = [])
(_, (_, *names)) = exp
splatted = false
names_without_kwargs = names.reject { |n| n.is_a?(Sexp) && n.sexp_type == :kwarg }
names.each_with_index.each_with_object({}) do |(e, index), hash|
raise 'destructuring assignment is too big' if index > MAX_MASGN_PATH_INDEX
if e.is_a?(Sexp) && e.sexp_type == :masgn
hash.merge!(masgn_paths(e, prefix + [index]))
elsif e.sexp_type == :splat
splatted = true
hash[e] = { path: prefix + [index], offset_from_end: names_without_kwargs.size - index - 1 }
elsif e.sexp_type == :kwsplat
splatted = true
hash[e] = { path: prefix + [index], offset_from_end: names.size - index - 1 }
elsif splatted
hash[e] = { path: prefix + [(names.size - index) * -1] }
else
hash[e] = { path: prefix + [index] }
end
end
end
def masgn_set(exp, value)
case exp.sexp_type
when :cdecl
exp.new(:const_set, :self, s(:intern, exp.last), value)
when :gasgn
exp.new(:global_set, :env, s(:intern, exp.last), value)
when :iasgn
exp.new(:ivar_set, :self, :env, s(:intern, exp.last), value)
when :lasgn, :kwarg
exp.new(:var_set, :env, s(:s, exp[1]), value)
when :attrasgn
(_, receiver, message, attr) = exp
args = s(:args, attr, value)
exp.new(:public_send, receiver, s(:intern, message), args)
else
raise "unknown masgn type: #{exp.inspect} (#{exp.file}\##{exp.line})"
end
end
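      # For example (illustrative sexps, assuming they are well-formed):
      #   masgn_set(s(:iasgn, :@x), value) # => s(:ivar_set, :self, :env, s(:intern, :@x), value)
      #   masgn_set(s(:gasgn, :$x), value) # => s(:global_set, :env, s(:intern, :$x), value)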
end
end
end
| 35.351648 | 131 | 0.54212 |
79aa5121bc466302ec10ef88c483811b10d95eef | 98 | Spree::Core::Engine.routes.draw do
namespace :admin do
resources :odmailers
end
end
| 16.333333 | 34 | 0.683673 |
1ca61375ed14751e4b1f6127d217542dc0de9569 | 8,083 | require 'rails_helper'
feature 'Course Coaches Index', js: true do
include UserSpecHelper
include SubmissionsHelper
  # Set up a course with a single founder target, ...
let!(:school) { create :school, :current }
let!(:school_2) { create :school }
let!(:course_1) { create :course, school: school }
let!(:course_2) { create :course, school: school }
let!(:coach_1) { create :faculty, school: school }
let!(:coach_2) { create :faculty, school: school }
let!(:coach_3) { create :faculty, school: school }
let!(:coach_4) { create :faculty, school: school }
let!(:coach_5) { create :faculty, school: school_2 }
let!(:coach_6) { create :faculty, school: school, exited: true }
let!(:c1_level) { create :level, course: course_1 }
let!(:c2_level) { create :level, course: course_2 }
let!(:startup_c1) { create :startup, level: c1_level }
let!(:startup_c2) { create :startup, level: c2_level }
let(:team_with_one_student) { create :team, level: c2_level }
let!(:lone_student) { create :founder, startup: team_with_one_student }
let!(:school_admin) { create :school_admin, school: school }
before do
create :faculty_course_enrollment, faculty: coach_1, course: course_1
create :faculty_course_enrollment, faculty: coach_2, course: course_1
create :faculty_startup_enrollment, :with_course_enrollment, faculty: coach_2, startup: startup_c1
create :faculty_startup_enrollment, :with_course_enrollment, faculty: coach_3, startup: startup_c2
create :faculty_startup_enrollment, :with_course_enrollment, faculty: coach_3, startup: team_with_one_student
create :faculty_startup_enrollment, :with_course_enrollment, faculty: coach_4, startup: startup_c2
end
scenario 'school admin assigns faculty to a course' do
sign_in_user school_admin.user, referer: school_course_coaches_path(course_1)
# list all coaches
expect(page).to have_text(coach_1.name)
expect(page).to have_text(coach_2.name)
expect(course_1.faculty.count).to eq(2)
click_button 'Assign Coaches to Course'
expect(page).to have_text('No coaches selected')
find("div[title='Select #{coach_4.name}']").click
click_button 'Add Course Coaches'
within('div[aria-label="List of course coaches"]') do
expect(page).to have_text(coach_4.name)
end
expect(course_1.reload.faculty.count).to eq(3)
end
scenario 'school admin removes a course coach' do
sign_in_user school_admin.user, referer: school_course_coaches_path(course_1)
expect(page).to have_text(coach_1.name)
expect(coach_2.startups.count).to eq(1)
accept_confirm do
find("div[aria-label='Delete #{coach_2.name}']").click
end
expect(page).to_not have_text(coach_2.name)
expect(course_1.faculty.count).to eq(1)
expect(course_1.faculty.first).to eq(coach_1)
# Removes the coach team enrollment as well
expect(coach_2.startups.count).to eq(0)
end
scenario 'school admin checks teams assigned to a coach and deletes them' do
sign_in_user school_admin.user, referer: school_course_coaches_path(course_2)
expect(page).to have_text(coach_3.name)
find("div[aria-label='coach-card-#{coach_3.id}']").click
expect(page).to have_text('Students assigned to coach')
expect(page).to have_text(coach_3.email)
within("div[aria-label='Team #{startup_c2.name}']") do
expect(page).to have_text(startup_c2.founders.first.name)
expect(page).to have_text(startup_c2.founders.last.name)
expect(page).to have_text(startup_c2.name)
end
within("div[aria-label='Team #{team_with_one_student.name}']") do
expect(page).to have_text(lone_student.name)
expect(page).to_not have_text(team_with_one_student.name)
end
accept_confirm do
click_button "Delete #{startup_c2.name}"
end
expect(page).to_not have_text(startup_c2.name)
accept_confirm do
click_button "Delete #{team_with_one_student.name}"
end
expect(page).to have_text('There are no students assigned to this coach.')
expect(coach_3.startups.count).to eq(0)
expect(coach_3.courses.count).to eq(1)
expect(course_2.faculty.count).to eq(2)
end
scenario 'user who is not logged in gets redirected to sign in page' do
visit school_course_coaches_path(course_1)
expect(page).to have_text("Please sign in to continue.")
end
context 'when a coach is assigned as a team coach to students in multiple courses' do
let!(:startup_c1_2) { create :startup, level: c1_level }
before do
create :faculty_startup_enrollment, :with_course_enrollment, faculty: coach_3, startup: startup_c1_2
end
scenario 'user sees team assignments for coaches in the list' do
sign_in_user school_admin.user, referer: school_course_coaches_path(course_2)
# Check teams assigned to coach_3 in course 2
find("div[aria-label='coach-card-#{coach_3.id}']").click
expect(page).to have_text(startup_c2.name)
expect(page).not_to have_text(startup_c1_2.name)
end
end
context 'when a coach has reviewed and pending submissions' do
let(:startup_c1_2) { create :startup, level: c1_level }
let(:target_group_c1) { create :target_group, level: c1_level }
let(:target_group_c2) { create :target_group, level: c2_level }
let(:evaluation_criteria_c1) { create :evaluation_criterion, course: course_1 }
let(:evaluation_criteria_c2) { create :evaluation_criterion, course: course_2 }
let(:target_c1_1) { create :target, :for_founders, target_group: target_group_c1 }
let(:target_c1_2) { create :target, :for_team, target_group: target_group_c1 }
let(:target_c1_3) { create :target, :for_founders, target_group: target_group_c1 }
let(:target_c2) { create :target, :for_founders, target_group: target_group_c2 }
before do
      # Mark all of these targets as evaluated.
target_c1_1.evaluation_criteria << evaluation_criteria_c1
target_c1_2.evaluation_criteria << evaluation_criteria_c1
target_c1_3.evaluation_criteria << evaluation_criteria_c1
target_c2.evaluation_criteria << evaluation_criteria_c2
      # Enroll the coach directly onto one startup in this course, and another in a different course.
create :faculty_startup_enrollment, :with_course_enrollment, faculty: coach_1, startup: startup_c1
create :faculty_startup_enrollment, :with_course_enrollment, faculty: coach_1, startup: startup_c2
# Put a few submissions in the two targets in course 1.
first_student = startup_c1.founders.first
second_student = startup_c1.founders.last
complete_target(target_c1_1, first_student, evaluator: coach_1)
complete_target(target_c1_3, first_student, evaluator: coach_1)
submit_target(target_c1_1, second_student, evaluator: coach_1)
complete_target(target_c1_3, second_student, evaluator: coach_1)
# Submission graded by another coach in the same course shouldn't be counted.
complete_target(target_c1_2, first_student, evaluator: coach_2)
# Pending submission from another team without direct enrollment shouldn't be counted.
submit_target(target_c1_2, startup_c1_2.founders.first, evaluator: coach_1)
# A submission pending review by this coach in another course should not be counted.
submit_target(target_c2, startup_c2.founders.first, evaluator: coach_1)
# A submission reviewed by this coach in another course should not be counted.
complete_target(target_c2, startup_c2.founders.second, evaluator: coach_1)
end
scenario 'admin checks the counts of pending and reviewed submissions on an assigned coach' do
sign_in_user school_admin.user, referer: school_course_coaches_path(course_1)
      # Check the submission counts for coach_1 in course 1
find("div[aria-label='coach-card-#{coach_1.id}']").click
within('div[aria-label="Reviewed Submissions"') do
expect(page).to have_text('3')
end
within('div[aria-label="Pending Submissions"') do
expect(page).to have_text('1')
end
end
end
end
| 40.61809 | 113 | 0.731535 |
380b1d38036171cf94af8c844a90aebda228530c | 8,941 | # typed: false
# frozen_string_literal: true
describe "globally-scoped helper methods" do
let(:dir) { mktmpdir }
def esc(code)
/(\e\[\d+m)*\e\[#{code}m/
end
describe "#ofail" do
it "sets Homebrew.failed to true" do
expect {
ofail "foo"
}.to output("Error: foo\n").to_stderr
expect(Homebrew).to have_failed
end
end
describe "#odie" do
it "exits with 1" do
expect(self).to receive(:exit).and_return(1)
expect {
odie "foo"
}.to output("Error: foo\n").to_stderr
end
end
describe "#pretty_installed" do
subject(:pretty_installed_output) { pretty_installed("foo") }
context "when $stdout is a TTY" do
before { allow($stdout).to receive(:tty?).and_return(true) }
context "with HOMEBREW_NO_EMOJI unset" do
it "returns a string with a colored checkmark" do
expect(pretty_installed_output)
.to match(/#{esc 1}foo #{esc 32}✔#{esc 0}/)
end
end
context "with HOMEBREW_NO_EMOJI set" do
before { ENV["HOMEBREW_NO_EMOJI"] = "1" }
it "returns a string with colored info" do
expect(pretty_installed_output)
.to match(/#{esc 1}foo \(installed\)#{esc 0}/)
end
end
end
context "when $stdout is not a TTY" do
before { allow($stdout).to receive(:tty?).and_return(false) }
it "returns plain text" do
expect(pretty_installed_output).to eq("foo")
end
end
end
describe "#pretty_uninstalled" do
subject(:pretty_uninstalled_output) { pretty_uninstalled("foo") }
context "when $stdout is a TTY" do
before { allow($stdout).to receive(:tty?).and_return(true) }
context "with HOMEBREW_NO_EMOJI unset" do
it "returns a string with a colored checkmark" do
expect(pretty_uninstalled_output)
.to match(/#{esc 1}foo #{esc 31}✘#{esc 0}/)
end
end
context "with HOMEBREW_NO_EMOJI set" do
before { ENV["HOMEBREW_NO_EMOJI"] = "1" }
it "returns a string with colored info" do
expect(pretty_uninstalled_output)
.to match(/#{esc 1}foo \(uninstalled\)#{esc 0}/)
end
end
end
context "when $stdout is not a TTY" do
before { allow($stdout).to receive(:tty?).and_return(false) }
it "returns plain text" do
expect(pretty_uninstalled_output).to eq("foo")
end
end
end
describe "#interactive_shell" do
let(:shell) { dir/"myshell" }
it "starts an interactive shell session" do
IO.write shell, <<~SH
#!/bin/sh
echo called > "#{dir}/called"
SH
FileUtils.chmod 0755, shell
ENV["SHELL"] = shell
expect { interactive_shell }.not_to raise_error
expect(dir/"called").to exist
end
end
describe "#with_custom_locale" do
it "temporarily overrides the system locale" do
ENV["LC_ALL"] = "en_US.UTF-8"
with_custom_locale("C") do
expect(ENV["LC_ALL"]).to eq("C")
end
expect(ENV["LC_ALL"]).to eq("en_US.UTF-8")
end
end
describe "#which" do
let(:cmd) { dir/"foo" }
before { FileUtils.touch cmd }
it "returns the first executable that is found" do
cmd.chmod 0744
expect(which(File.basename(cmd), File.dirname(cmd))).to eq(cmd)
end
it "skips non-executables" do
expect(which(File.basename(cmd), File.dirname(cmd))).to be nil
end
it "skips malformed path and doesn't fail" do
# 'which' should not fail if a path is malformed
# see https://github.com/Homebrew/legacy-homebrew/issues/32789 for an example
cmd.chmod 0744
# ~~ will fail because ~foo resolves to foo's home and there is no '~' user
path = ["~~", File.dirname(cmd)].join(File::PATH_SEPARATOR)
expect(which(File.basename(cmd), path)).to eq(cmd)
end
end
describe "#which_all" do
let(:cmd1) { dir/"foo" }
let(:cmd2) { dir/"bar/foo" }
let(:cmd3) { dir/"bar/baz/foo" }
before do
(dir/"bar/baz").mkpath
FileUtils.touch cmd2
[cmd1, cmd3].each do |cmd|
FileUtils.touch cmd
cmd.chmod 0744
end
end
it "returns an array of all executables that are found" do
path = [
"#{dir}/bar/baz",
"#{dir}/baz:#{dir}",
"~baduserpath",
].join(File::PATH_SEPARATOR)
expect(which_all("foo", path)).to eq([cmd3, cmd1])
end
end
specify "#which_editor" do
ENV["HOMEBREW_EDITOR"] = "vemate -w"
ENV["HOMEBREW_PATH"] = dir
editor = "#{dir}/vemate"
FileUtils.touch editor
FileUtils.chmod 0755, editor
expect(which_editor).to eq("vemate -w")
end
specify "#gzip" do
mktmpdir do |path|
somefile = path/"somefile"
FileUtils.touch somefile
expect(gzip(somefile)[0].to_s).to eq("#{somefile}.gz")
expect(Pathname.new("#{somefile}.gz")).to exist
end
end
specify "#capture_stderr" do
err = capture_stderr do
$stderr.print "test"
end
expect(err).to eq("test")
end
describe "#pretty_duration" do
it "converts seconds to a human-readable string" do
expect(pretty_duration(1)).to eq("1 second")
expect(pretty_duration(2.5)).to eq("2 seconds")
expect(pretty_duration(42)).to eq("42 seconds")
expect(pretty_duration(240)).to eq("4 minutes")
expect(pretty_duration(252.45)).to eq("4 minutes 12 seconds")
end
end
specify "#disk_usage_readable" do
expect(disk_usage_readable(1)).to eq("1B")
expect(disk_usage_readable(1000)).to eq("1000B")
expect(disk_usage_readable(1024)).to eq("1KB")
expect(disk_usage_readable(1025)).to eq("1KB")
expect(disk_usage_readable(4_404_020)).to eq("4.2MB")
expect(disk_usage_readable(4_509_715_660)).to eq("4.2GB")
end
describe "#number_readable" do
it "returns a string with thousands separators" do
expect(number_readable(1)).to eq("1")
expect(number_readable(1_000)).to eq("1,000")
expect(number_readable(1_000_000)).to eq("1,000,000")
end
end
specify "#truncate_text_to_approximate_size" do
glue = "\n[...snip...]\n" # hard-coded copy from truncate_text_to_approximate_size
n = 20
long_s = "x" * 40
s = truncate_text_to_approximate_size(long_s, n)
expect(s.length).to eq(n)
expect(s).to match(/^x+#{Regexp.escape(glue)}x+$/)
s = truncate_text_to_approximate_size(long_s, n, front_weight: 0.0)
expect(s).to eq(glue + ("x" * (n - glue.length)))
s = truncate_text_to_approximate_size(long_s, n, front_weight: 1.0)
expect(s).to eq(("x" * (n - glue.length)) + glue)
end
describe "#odeprecated" do
it "raises a MethodDeprecatedError when `disable` is true" do
ENV.delete("HOMEBREW_DEVELOPER")
expect {
odeprecated(
"method", "replacement",
caller: ["#{HOMEBREW_LIBRARY}/Taps/homebrew/homebrew-core/"],
disable: true
)
}.to raise_error(
MethodDeprecatedError,
%r{method.*replacement.*homebrew/core.*/Taps/homebrew/homebrew-core/}m,
)
end
end
describe "#with_env" do
it "sets environment variables within the block" do
expect(ENV["PATH"]).not_to eq("/bin")
with_env(PATH: "/bin") do
expect(ENV["PATH"]).to eq("/bin")
end
end
it "restores ENV after the block" do
with_env(PATH: "/bin") do
expect(ENV["PATH"]).to eq("/bin")
end
expect(ENV["PATH"]).not_to eq("/bin")
end
it "restores ENV if an exception is raised" do
expect {
with_env(PATH: "/bin") do
raise StandardError, "boom"
end
}.to raise_error(StandardError)
expect(ENV["PATH"]).not_to eq("/bin")
end
end
describe "#tap_and_name_comparison" do
describe "both strings are only names" do
it "alphabetizes the strings" do
expect(%w[a b].sort(&tap_and_name_comparison)).to eq(%w[a b])
expect(%w[b a].sort(&tap_and_name_comparison)).to eq(%w[a b])
end
end
describe "both strings include tap" do
it "alphabetizes the strings" do
expect(%w[a/z/z b/z/z].sort(&tap_and_name_comparison)).to eq(%w[a/z/z b/z/z])
expect(%w[b/z/z a/z/z].sort(&tap_and_name_comparison)).to eq(%w[a/z/z b/z/z])
expect(%w[z/a/z z/b/z].sort(&tap_and_name_comparison)).to eq(%w[z/a/z z/b/z])
expect(%w[z/b/z z/a/z].sort(&tap_and_name_comparison)).to eq(%w[z/a/z z/b/z])
expect(%w[z/z/a z/z/b].sort(&tap_and_name_comparison)).to eq(%w[z/z/a z/z/b])
expect(%w[z/z/b z/z/a].sort(&tap_and_name_comparison)).to eq(%w[z/z/a z/z/b])
end
end
describe "only one string includes tap" do
it "prefers the string without tap" do
expect(%w[a/z/z z].sort(&tap_and_name_comparison)).to eq(%w[z a/z/z])
expect(%w[z a/z/z].sort(&tap_and_name_comparison)).to eq(%w[z a/z/z])
end
end
end
end
| 28.205047 | 86 | 0.614473 |
5d8eaa8f8d9f091e0279cbb129f32d055932d315 | 636 | # encoding: utf-8
$:.unshift File.expand_path('../lib', __FILE__)
require 'travis/config/version'
Gem::Specification.new do |s|
s.name = "travis-config"
s.version = TravisConfig::VERSION
s.authors = ["Travis CI"]
s.email = "[email protected]"
s.homepage = "https://github.com/travis-ci/travis-core"
s.summary = "Travis CI config"
s.description = "#{s.summary}."
s.license = "MIT"
s.files = Dir['{lib/**/*,spec/**/*,[A-Z]*}']
s.platform = Gem::Platform::RUBY
s.require_path = 'lib'
s.rubyforge_project = '[none]'
s.add_dependency 'hashr', '~> 0.0'
end
| 27.652174 | 61 | 0.591195 |
1a64847065b3d3adc5d8c8e387240c899089fd2d | 3,174 | #-- copyright
# OpenProject is a project management system.
# Copyright (C) 2012-2018 the OpenProject Foundation (OPF)
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License version 3.
#
# OpenProject is a fork of ChiliProject, which is a fork of Redmine. The copyright follows:
# Copyright (C) 2006-2017 Jean-Philippe Lang
# Copyright (C) 2010-2013 the ChiliProject Team
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# See docs/COPYRIGHT.rdoc for more details.
#++
module Components
module WorkPackages
class TableConfigurationModal
include Capybara::DSL
include RSpec::Matchers
def initialize; end
def self.do_and_save
new.tap do |modal|
yield modal
modal.save
end
end
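      # Illustrative usage from a spec (the tab name shown is hypothetical):
      #   Components::WorkPackages::TableConfigurationModal.do_and_save do |modal|
      #     modal.open_and_switch_to 'Display settings'
      #   end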
def open_and_switch_to(name)
open!
switch_to(name)
end
def open_and_set_display_mode(mode)
open_and_switch_to 'Display settings'
choose("display_mode_switch", option: mode)
end
def open!
scroll_to_and_click trigger
expect_open
end
def set_display_sums(enable: true)
open_and_switch_to 'Display settings'
if enable
check 'display_sums_switch'
else
uncheck 'display_sums_switch'
end
save
end
def save
find("#{selector} .button.-highlight").click
end
def cancel
find("#{selector} .button", text: 'Cancel').click
end
def expect_open
expect(page).to have_selector(selector, wait: 40)
end
def expect_closed
expect(page).to have_no_selector(selector)
end
def expect_disabled_tab(name)
expect(page).to have_selector("#{selector} .tab-show.-disabled", text: name)
end
def selected_tab(name)
page.find("#{selector} .tab-show.selected", text: name)
page.find("#{selector} .tab-content[data-tab-name='#{name}']")
end
def switch_to(target)
# Switching too fast may result in the click handler not yet firing
# so wait a bit initially
sleep 1
retry_block do
find("#{selector} .tab-show", text: target, wait: 10).click
selected_tab(target)
end
end
def selector
'.wp-table--configuration-modal'
end
private
def trigger
find('.wp-table--configuration-modal--trigger')
end
end
end
end
| 26.898305 | 91 | 0.655955 |
6a3248b73b9773c9386ef55ee5c252796f0568aa | 726 | cask 'konica-minolta-bizhub-c220-c280-c360-driver' do
version '3.11.0,201606.27043313'
sha256 '236d1fd8acf2888ac48668eebf02aa8de567d0f481142e2ac9d4d7b2a9100587'
# konicaminolta.com was verified as official when first introduced to the cask
url "https://o.cses.konicaminolta.com/file/Default.aspx?FilePath=DL/#{version.after_comma.major}/#{version.after_comma.minor}/BHC360PSMacOS109_#{version.before_comma.no_dots}MU.dmg"
name 'Konica Minolta Bizhub C220/C280/C360 PostScript Printer Driver'
homepage 'https://www.konicaminolta.eu/en/business-solutions/support/download-center.html'
depends_on macos: '>= :mavericks'
pkg 'bizhub_C360_109.pkg'
uninstall pkgutil: 'jp.konicaminolta.print.package.C360'
end
| 45.375 | 183 | 0.797521 |
33f72b2e16c285ec32aa092476be3be235ecf2fd | 291 | class ApplicationController < ActionController::API
before_action :configure_permitted_parameters, if: :devise_controller?
protected
def configure_permitted_parameters
devise_parameter_sanitizer.permit(:sign_up, keys: [:name, :email, :password, :password_confirmation])
end
end | 32.333333 | 105 | 0.810997 |
281c076c386511f07b9476c5301f9405707f750e | 8,786 | # Copyright (c) 2016, 2021, Oracle and/or its affiliates. All rights reserved.
# This software is dual-licensed to you under the Universal Permissive License (UPL) 1.0 as shown at https://oss.oracle.com/licenses/upl or Apache License 2.0 as shown at http://www.apache.org/licenses/LICENSE-2.0. You may choose either license.
require 'date'
require_relative 'update_data_asset_details'
# rubocop:disable Lint/UnneededCopDisableDirective, Metrics/LineLength
module OCI
  # Details for the Fusion App data asset type.
class DataIntegration::Models::UpdateDataAssetFromFusionApp < DataIntegration::Models::UpdateDataAssetDetails
# The service url of the BI Server.
# @return [String]
attr_accessor :service_url
# Attribute mapping from ruby-style variable name to JSON key.
def self.attribute_map
{
# rubocop:disable Style/SymbolLiteral
'model_type': :'modelType',
'key': :'key',
'model_version': :'modelVersion',
'name': :'name',
'description': :'description',
'object_status': :'objectStatus',
'object_version': :'objectVersion',
'identifier': :'identifier',
'external_key': :'externalKey',
'asset_properties': :'assetProperties',
'registry_metadata': :'registryMetadata',
'service_url': :'serviceUrl'
# rubocop:enable Style/SymbolLiteral
}
end
# Attribute type mapping.
def self.swagger_types
{
# rubocop:disable Style/SymbolLiteral
'model_type': :'String',
'key': :'String',
'model_version': :'String',
'name': :'String',
'description': :'String',
'object_status': :'Integer',
'object_version': :'Integer',
'identifier': :'String',
'external_key': :'String',
'asset_properties': :'Hash<String, String>',
'registry_metadata': :'OCI::DataIntegration::Models::RegistryMetadata',
'service_url': :'String'
# rubocop:enable Style/SymbolLiteral
}
end
# rubocop:disable Metrics/CyclomaticComplexity, Metrics/AbcSize, Metrics/PerceivedComplexity
# rubocop:disable Metrics/MethodLength, Layout/EmptyLines, Style/SymbolLiteral
# Initializes the object
# @param [Hash] attributes Model attributes in the form of hash
    # @option attributes [String] :key The value to assign to the {OCI::DataIntegration::Models::UpdateDataAssetDetails#key #key} property
    # @option attributes [String] :model_version The value to assign to the {OCI::DataIntegration::Models::UpdateDataAssetDetails#model_version #model_version} property
    # @option attributes [String] :name The value to assign to the {OCI::DataIntegration::Models::UpdateDataAssetDetails#name #name} property
    # @option attributes [String] :description The value to assign to the {OCI::DataIntegration::Models::UpdateDataAssetDetails#description #description} property
    # @option attributes [Integer] :object_status The value to assign to the {OCI::DataIntegration::Models::UpdateDataAssetDetails#object_status #object_status} property
    # @option attributes [Integer] :object_version The value to assign to the {OCI::DataIntegration::Models::UpdateDataAssetDetails#object_version #object_version} property
    # @option attributes [String] :identifier The value to assign to the {OCI::DataIntegration::Models::UpdateDataAssetDetails#identifier #identifier} property
    # @option attributes [String] :external_key The value to assign to the {OCI::DataIntegration::Models::UpdateDataAssetDetails#external_key #external_key} property
    # @option attributes [Hash<String, String>] :asset_properties The value to assign to the {OCI::DataIntegration::Models::UpdateDataAssetDetails#asset_properties #asset_properties} property
    # @option attributes [OCI::DataIntegration::Models::RegistryMetadata] :registry_metadata The value to assign to the {OCI::DataIntegration::Models::UpdateDataAssetDetails#registry_metadata #registry_metadata} property
# @option attributes [String] :service_url The value to assign to the {#service_url} property
def initialize(attributes = {})
return unless attributes.is_a?(Hash)
attributes['modelType'] = 'FUSION_APP_DATA_ASSET'
super(attributes)
# convert string to symbol for hash key
attributes = attributes.each_with_object({}) { |(k, v), h| h[k.to_sym] = v }
self.service_url = attributes[:'serviceUrl'] if attributes[:'serviceUrl']
raise 'You cannot provide both :serviceUrl and :service_url' if attributes.key?(:'serviceUrl') && attributes.key?(:'service_url')
self.service_url = attributes[:'service_url'] if attributes[:'service_url']
end
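    # Illustrative usage (attribute values are made up, not taken from the API docs):
    #   details = OCI::DataIntegration::Models::UpdateDataAssetFromFusionApp.new(
    #     name: 'fa_sales_asset',
    #     object_version: 1,
    #     service_url: 'https://example.fa.ocs.oraclecloud.com'
    #   )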
# rubocop:enable Metrics/CyclomaticComplexity, Metrics/AbcSize, Metrics/PerceivedComplexity
# rubocop:enable Metrics/MethodLength, Layout/EmptyLines, Style/SymbolLiteral
# rubocop:disable Metrics/CyclomaticComplexity, Metrics/AbcSize, Metrics/PerceivedComplexity, Layout/EmptyLines
# Checks equality by comparing each attribute.
# @param [Object] other the other object to be compared
def ==(other)
return true if equal?(other)
self.class == other.class &&
model_type == other.model_type &&
key == other.key &&
model_version == other.model_version &&
name == other.name &&
description == other.description &&
object_status == other.object_status &&
object_version == other.object_version &&
identifier == other.identifier &&
external_key == other.external_key &&
asset_properties == other.asset_properties &&
registry_metadata == other.registry_metadata &&
service_url == other.service_url
end
# rubocop:enable Metrics/CyclomaticComplexity, Metrics/AbcSize, Metrics/PerceivedComplexity, Layout/EmptyLines
# @see the `==` method
# @param [Object] other the other object to be compared
def eql?(other)
self == other
end
# rubocop:disable Metrics/AbcSize, Layout/EmptyLines
# Calculates hash code according to all attributes.
# @return [Fixnum] Hash code
def hash
[model_type, key, model_version, name, description, object_status, object_version, identifier, external_key, asset_properties, registry_metadata, service_url].hash
end
# rubocop:enable Metrics/AbcSize, Layout/EmptyLines
# rubocop:disable Metrics/AbcSize, Layout/EmptyLines
# Builds the object from hash
# @param [Hash] attributes Model attributes in the form of hash
# @return [Object] Returns the model itself
def build_from_hash(attributes)
return nil unless attributes.is_a?(Hash)
self.class.swagger_types.each_pair do |key, type|
if type =~ /^Array<(.*)>/i
          # check to ensure the input is an array given that the attribute
# is documented as an array but the input is not
if attributes[self.class.attribute_map[key]].is_a?(Array)
public_method("#{key}=").call(
attributes[self.class.attribute_map[key]]
.map { |v| OCI::Internal::Util.convert_to_type(Regexp.last_match(1), v) }
)
end
elsif !attributes[self.class.attribute_map[key]].nil?
public_method("#{key}=").call(
OCI::Internal::Util.convert_to_type(type, attributes[self.class.attribute_map[key]])
)
end
# or else data not found in attributes(hash), not an issue as the data can be optional
end
self
end
# rubocop:enable Metrics/AbcSize, Layout/EmptyLines
# Returns the string representation of the object
# @return [String] String presentation of the object
def to_s
to_hash.to_s
end
# Returns the object in the form of hash
# @return [Hash] Returns the object in the form of hash
def to_hash
hash = {}
self.class.attribute_map.each_pair do |attr, param|
value = public_method(attr).call
next if value.nil? && !instance_variable_defined?("@#{attr}")
hash[param] = _to_hash(value)
end
hash
end
private
# Outputs non-array value in the form of hash
# For object, use to_hash. Otherwise, just return the value
# @param [Object] value Any valid value
# @return [Hash] Returns the value in the form of hash
def _to_hash(value)
if value.is_a?(Array)
value.compact.map { |v| _to_hash(v) }
elsif value.is_a?(Hash)
{}.tap do |hash|
value.each { |k, v| hash[k] = _to_hash(v) }
end
elsif value.respond_to? :to_hash
value.to_hash
else
value
end
end
end
end
# rubocop:enable Lint/UnneededCopDisableDirective, Metrics/LineLength
| 43.280788 | 245 | 0.688595 |
03980aab0c46aa654aa8534103eaa31c97a02af3 | 1,837 | module Utils
class Bottles
class << self
def tag
if MacOS.version >= :lion
MacOS.cat
elsif MacOS.version == :snow_leopard
Hardware::CPU.is_64_bit? ? :snow_leopard : :snow_leopard_32
else
# Return, e.g., :tiger_g3, :leopard_g5_64, :leopard_64 (which is Intel)
if Hardware::CPU.type == :ppc
tag = "#{MacOS.cat}_#{Hardware::CPU.family}".to_sym
else
tag = MacOS.cat
end
MacOS.prefer_64_bit? ? "#{tag}_64".to_sym : tag
end
end
end
class Collector
private
alias original_find_matching_tag find_matching_tag
def find_matching_tag(tag)
original_find_matching_tag(tag) || find_altivec_tag(tag) || find_or_later_tag(tag)
end
# This allows generic Altivec PPC bottles to be supported in some
# formulae, while also allowing specific bottles in others; e.g.,
# sometimes a formula has just :tiger_altivec, other times it has
# :tiger_g4, :tiger_g5, etc.
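      # For instance (hypothetical formula data): if a formula only declares a
      # :tiger_altivec bottle, a lookup for :tiger_g4 still matches it, since
      # "tiger_g4" =~ /(\w+)_(g4|g4e|g5)$/ maps back to :tiger_altivec below.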
def find_altivec_tag(tag)
return unless tag.to_s =~ /(\w+)_(g4|g4e|g5)$/
altivec_tag = "#{Regexp.last_match(1)}_altivec".to_sym
altivec_tag if key?(altivec_tag)
end
# Allows a bottle tag to specify a specific OS or later,
# so the same bottle can target multiple OSs.
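      # For instance (illustrative): a bottle keyed :snow_leopard_or_later also
      # satisfies a :lion lookup, because :snow_leopard <= :lion.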
def find_or_later_tag(tag)
begin
tag_version = MacOS::Version.from_symbol(tag)
rescue ArgumentError
return
end
keys.find do |key|
if key.to_s.end_with?("_or_later")
later_tag = key.to_s[/(\w+)_or_later$/, 1].to_sym
MacOS::Version.from_symbol(later_tag) <= tag_version
elsif ARGV.force_bottle?
true
end
end
end
end
end
end
| 30.616667 | 90 | 0.597714 |
1a9563780cd8b3c3d40729b152d4f117b753127f | 837 | # coding: utf-8
lib = File.expand_path('../lib', __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require 'codebreaker/version'
Gem::Specification.new do |spec|
spec.name = "codebreaker"
spec.version = Codebreaker::VERSION
spec.authors = ["Sergey Prokopchuk"]
spec.email = ["[email protected]"]
spec.summary = %q{Ruby gem}
  spec.description = %q{A Codebreaker Ruby gem}
spec.homepage = ""
spec.license = "MIT"
spec.files = `git ls-files -z`.split("\x0")
spec.executables = spec.files.grep(%r{^bin/}) { |f| File.basename(f) }
spec.test_files = spec.files.grep(%r{^(test|spec|features)/})
spec.require_paths = ["lib"]
spec.add_development_dependency "bundler", "~> 1.7"
spec.add_development_dependency "rake", "~> 10.0"
end
| 34.875 | 74 | 0.639188 |
bb86cc1b7fa29fad69023c7dfe7a91552b7938bf | 1,910 | require File.expand_path('../spec_helper', __FILE__)
describe Cequel::Record::Timestamps do
model :Blog do
key :subdomain, :text
column :name, :text
timestamps
end
model :Post do
key :blog_subdomain, :text
key :id, :timeuuid, auto: true
column :name, :text
timestamps
end
let!(:now) { Timecop.freeze }
context 'with simple primary key' do
let!(:blog) { Blog.create!(subdomain: 'bigdata') }
it 'should populate created_at after create new record' do
expect(blog.created_at).to be_within(one_millisecond).of(now)
end
it 'should populate updated_at after create new record' do
expect(blog.updated_at).to be_within(one_millisecond).of(now)
end
it 'should update updated_at after record update but not created_at' do
future = Timecop.freeze(now + 2.minutes)
blog.name = 'name'
blog.save!
expect(blog.updated_at).to be_within(one_millisecond).of(future)
end
it 'should cast the timestamp in the same way that Cassandra records it' do
expect(Blog.first.updated_at).to eq(blog.updated_at)
end
end
context 'with auto-generated timeuuid primary key' do
let!(:post) { Post['bigdata'].create! }
it 'should not have created_at column' do
expect(Post.column_names).not_to include(:created_at)
end
it 'should expose created_at' do
expect(post.created_at).to be_within(one_millisecond).of(now)
end
it 'should populate updated_at after create new record' do
expect(post.updated_at).to be_within(one_millisecond).of(now)
end
it 'should update updated_at after record update but not created_at' do
future = Timecop.freeze(now + 2.minutes)
post.name = 'name'
post.save!
expect(post.created_at).to be_within(one_millisecond).of(now)
expect(post.updated_at).to be_within(one_millisecond).of(future)
end
end
end
| 28.939394 | 79 | 0.691623 |
91d17b3ddb001e49969bde341486c644e0dea8b3 | 4,639 | include_recipe 'dpkg_autostart'
require "base64"
include_recipe 'bcpc-hadoop::hadoop_config'
%w{hadoop-hdfs-namenode hadoop-hdfs-zkfc}.each do |pkg|
dpkg_autostart pkg do
allow false
end
package pkg do
action :upgrade
end
end
node[:bcpc][:hadoop][:mounts].each do |d|
directory "/disk/#{d}/dfs/nn" do
owner "hdfs"
group "hdfs"
mode 0755
action :create
recursive true
end
directory "/disk/#{d}/dfs/namedir" do
owner "hdfs"
group "hdfs"
mode 0700
action :create
recursive true
end
execute "fixup nn owner" do
command "chown -Rf hdfs:hdfs /disk/#{d}/dfs"
only_if { Etc.getpwuid(File.stat("/disk/#{d}/dfs/").uid).name != "hdfs" }
end
end
bash "format namenode" do
code "hdfs namenode -format -nonInteractive -force"
user "hdfs"
action :run
creates "/disk/#{node[:bcpc][:hadoop][:mounts][0]}/dfs/nn/current/VERSION"
not_if { node[:bcpc][:hadoop][:mounts].any? { |d| File.exists?("/disk/#{d}/dfs/nn/current/VERSION") } }
end
bash "format-zk-hdfs-ha" do
code "yes | hdfs zkfc -formatZK"
action :run
user "hdfs"
notifies :restart, "service[generally run hadoop-hdfs-namenode]", :delayed
not_if { zk_formatted? }
end
service "hadoop-hdfs-zkfc" do
supports :status => true, :restart => true, :reload => false
action [:enable, :start]
subscribes :restart, "template[/etc/hadoop/conf/hdfs-site.xml]", :delayed
subscribes :restart, "template[/etc/hadoop/conf/hdfs-site_HA.xml]", :delayed
subscribes :restart, "template[/etc/hadoop/conf/hdfs-policy.xml]", :delayed
end
# need to bring the namenode down to initialize shared edits
service "bring hadoop-hdfs-namenode down for shared edits and HA transition" do
service_name "hadoop-hdfs-namenode"
action :stop
supports :status => true
notifies :run, "bash[initialize-shared-edits]", :immediately
only_if { node[:bcpc][:hadoop][:mounts].all? { |d| not File.exists?("/disk/#{d}/dfs/jn/#{node.chef_environment}/current/VERSION") } }
end
bash "initialize-shared-edits" do
code "hdfs namenode -initializeSharedEdits"
user "hdfs"
action :nothing
end
service "generally run hadoop-hdfs-namenode" do
action [:enable, :start]
supports :status => true, :restart => true, :reload => false
service_name "hadoop-hdfs-namenode"
subscribes :restart, "template[/etc/hadoop/conf/hdfs-site.xml]", :delayed
subscribes :restart, "template[/etc/hadoop/conf/hdfs-policy.xml]", :delayed
subscribes :restart, "template[/etc/hadoop/conf/hdfs-site_HA.xml]", :delayed
subscribes :restart, "bash[initialize-shared-edits]", :immediately
end
## We need to bootstrap the standby and journal node transaction logs
# The -bootstrapStandby and -initializeSharedEdits don't actually work
# when the namenode starts up, because it is in safemode and won't commit
# a txn.
# So we fake the formatting of the txn directories by copying over current/VERSION.
# This tricks the journalnodes and namenodes into thinking they've been formatted.
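# A sketch of the consuming side (assumptions: a get_config helper paired with
# make_config exists elsewhere in this cookbook, and a standby-node recipe uses it):
#   IO.write("#{Chef::Config[:file_cache_path]}/nn_fmt.tgz",
#            Base64.decode64(get_config('namenode_txn_fmt')))
#   # ...then extract into /disk/<d>/dfs/ before starting the standby namenode.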
ruby_block "grab the format UUID File" do
block do
Dir.chdir("/disk/#{node[:bcpc][:hadoop][:mounts][0]}/dfs/") do
system("tar czvf #{Chef::Config[:file_cache_path]}/nn_fmt.tgz nn/current/VERSION jn/#{node.chef_environment}/current/VERSION")
end
make_config("namenode_txn_fmt", Base64.encode64(IO.read("#{Chef::Config[:file_cache_path]}/nn_fmt.tgz")));
end
action :nothing
subscribes :run, "service[generally run hadoop-hdfs-namenode]", :immediately
only_if { File.exists?("/disk/#{node[:bcpc][:hadoop][:mounts][0]}/dfs/nn/current/VERSION") }
end
bash "reload hdfs nodes" do
code "hdfs dfsadmin -refreshNodes"
user "hdfs"
action :nothing
subscribes :run, "template[/etc/hadoop/conf/dfs.exclude]", :delayed
end
###
# We only want to execute this once, as it is setup of dirs within HDFS.
# We'd prefer to do it after all nodes are members of the HDFS system
#
bash "create-hdfs-temp" do
code "hadoop fs -mkdir /tmp; hadoop fs -chmod -R 1777 /tmp"
user "hdfs"
not_if "sudo -u hdfs hadoop fs -test -d /tmp"
end
bash "create-hdfs-user" do
code "hadoop fs -mkdir /user; hadoop fs -chmod -R 0755 /user"
user "hdfs"
not_if "sudo -u hdfs hadoop fs -test -d /user"
end
bash "create-hdfs-history" do
code "hadoop fs -mkdir /user/history; hadoop fs -chmod -R 1777 /user/history; hadoop fs -chown yarn /user/history"
user "hdfs"
not_if "sudo -u hdfs hadoop fs -test -d /user/history"
end
bash "create-hdfs-yarn-log" do
code "hadoop fs -mkdir -p /var/log/hadoop-yarn; hadoop fs -chown yarn:mapred /var/log/hadoop-yarn"
user "hdfs"
not_if "sudo -u hdfs hadoop fs -test -d /var/log/hadoop-yarn"
end | 33.615942 | 135 | 0.705756 |
ed0fda078d1389f3bcf27c42cd5c98f0c28a8301 | 277 | module RRImm
class Publisher
# will receive in order:
# - formatted item
# - raw feed
# - raw item
def publish(*args)
raise "You have to implement this method"
end
end
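  # A minimal sketch (hypothetical, not shipped with this library) of a
  # publisher honouring the argument order documented above.
  class StdoutPublisher < Publisher
    def publish(formatted_item, _feed, _item)
      $stdout.puts formatted_item
    end
  end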
end
require_relative 'publisher/pipe'
require_relative 'publisher/reddit'
| 18.466667 | 47 | 0.67148 |
1abe4254421a5397a8b7fccef7fbbb4fce092d7f | 1,118 | $:.push File.expand_path("../lib", __FILE__)
# Maintain your gem's version:
require "blog/gem/version"
# Describe your gem and declare its dependencies:
Gem::Specification.new do |s|
s.name = "blog-gem"
s.version = Blog::Gem::VERSION
s.authors = ["Vincent Thelang", "Michael Hoffmann"]
s.email = ["[email protected]", "[email protected]"]
s.homepage = "https://www.shubbl.de/blog"
s.summary = "Blog Tool for the Shubbl Homepage"
s.description = "Blog Tool for the Shubbl Homepage"
s.license = "MIT"
s.files = Dir["{app,config,db,lib}/**/*", "MIT-LICENSE", "Rakefile", "README.md"]
s.add_dependency "rails", "~> 5.0.4"
s.add_dependency 'sass-rails', '~> 5.0'
s.add_dependency "slim-rails"
s.add_dependency "paperclip"
s.add_dependency "acts-as-taggable-on"
s.add_dependency "stringex"
s.add_dependency "gravtastic"
s.add_dependency 'bcrypt', '~> 3.1.7'
s.add_dependency 'will_paginate'
s.add_dependency 'will_paginate-bootstrap'
s.add_dependency 'simple_form'
s.add_dependency 'pg_search'
s.add_dependency 'nokogiri'
end
| 32.882353 | 83 | 0.683363 |
5de64678d33d72a8cd105f7af8539f6b81eb73ad | 1,012 | require 'spec_helper_integration'
feature 'Implicit Grant Flow Errors' do
background do
config_is_set(:authenticate_resource_owner) { User.first || redirect_to('/sign_in') }
client_exists
create_resource_owner
sign_in
end
after do
access_token_should_not_exist
end
[
[:client_id, :invalid_client],
[:redirect_uri, :invalid_redirect_uri],
].each do |error|
scenario "displays #{error.last.inspect} error for invalid #{error.first.inspect}" do
visit authorization_endpoint_url(:client => @client, error.first => "invalid", :response_type => "token")
i_should_not_see "Authorize"
i_should_see_translated_error_message error.last
end
scenario "displays #{error.last.inspect} error when #{error.first.inspect} is missing" do
visit authorization_endpoint_url(:client => @client, error.first => "", :response_type => "token")
i_should_not_see "Authorize"
i_should_see_translated_error_message error.last
end
end
end
| 31.625 | 111 | 0.719368 |
1af8d1ec800c889680437b57512392716e137409 | 9,237 | =begin
#Influx API Service
#No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator)
The version of the OpenAPI document: 0.1.0
Generated by: https://openapi-generator.tech
OpenAPI Generator version: 5.0.0-beta3
=end
require 'date'
require 'time'
module InfluxDB2::API
class ResourceMember
attr_accessor :id
attr_accessor :oauth_id
attr_accessor :name
    # If set to `inactive`, the user is inactive.
attr_reader :status
attr_accessor :links
attr_reader :role
class EnumAttributeValidator
attr_reader :datatype
attr_reader :allowable_values
def initialize(datatype, allowable_values)
@allowable_values = allowable_values.map do |value|
case datatype.to_s
when /Integer/i
value.to_i
when /Float/i
value.to_f
else
value
end
end
end
def valid?(value)
!value || allowable_values.include?(value)
end
end
# Attribute mapping from ruby-style variable name to JSON key
def self.attribute_map
{
:'id' => :'id',
:'oauth_id' => :'oauthID',
:'name' => :'name',
:'status' => :'status',
:'links' => :'links',
:'role' => :'role',
}
end
# Attribute type mapping.
def self.openapi_types
{
:'id' => :'String',
:'oauth_id' => :'String',
:'name' => :'String',
:'status' => :'String',
:'links' => :'UserLinks',
:'role' => :'String',
}
end
# List of attributes with nullable: true
def self.openapi_nullable
Set.new([
])
end
# List of class defined in allOf (OpenAPI v3)
def self.openapi_all_of
[
:'ResourceMemberAllOf',
:'User'
]
end
# Initializes the object
# @param [Hash] attributes Model attributes in the form of hash
def initialize(attributes = {})
if (!attributes.is_a?(Hash))
fail ArgumentError, "The input argument (attributes) must be a hash in `InfluxDB2::ResourceMember` initialize method"
end
# check to see if the attribute exists and convert string to symbol for hash key
attributes = attributes.each_with_object({}) { |(k, v), h|
if (!self.class.attribute_map.key?(k.to_sym))
fail ArgumentError, "`#{k}` is not a valid attribute in `InfluxDB2::ResourceMember`. Please check the name to make sure it's valid. List of attributes: " + self.class.attribute_map.keys.inspect
end
h[k.to_sym] = v
}
if attributes.key?(:'id')
self.id = attributes[:'id']
end
if attributes.key?(:'oauth_id')
self.oauth_id = attributes[:'oauth_id']
end
if attributes.key?(:'name')
self.name = attributes[:'name']
end
if attributes.key?(:'status')
self.status = attributes[:'status']
else
self.status = 'active'
end
if attributes.key?(:'links')
self.links = attributes[:'links']
end
if attributes.key?(:'role')
self.role = attributes[:'role']
else
self.role = 'member'
end
end
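    # Illustrative usage (field values are made up):
    #   member = InfluxDB2::API::ResourceMember.new(name: 'jdoe')
    #   member.status # => 'active' (default)
    #   member.role   # => 'member' (default)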
# Show invalid properties with the reasons. Usually used together with valid?
# @return Array for valid properties with the reasons
def list_invalid_properties
invalid_properties = Array.new
if @name.nil?
invalid_properties.push('invalid value for "name", name cannot be nil.')
end
invalid_properties
end
# Check to see if the all the properties in the model are valid
# @return true if the model is valid
def valid?
return false if @name.nil?
status_validator = EnumAttributeValidator.new('String', ["active", "inactive"])
return false unless status_validator.valid?(@status)
role_validator = EnumAttributeValidator.new('String', ["member"])
return false unless role_validator.valid?(@role)
true
end
# Custom attribute writer method checking allowed values (enum).
# @param [Object] status Object to be assigned
def status=(status)
validator = EnumAttributeValidator.new('String', ["active", "inactive"])
unless validator.valid?(status)
fail ArgumentError, "invalid value for \"status\", must be one of #{validator.allowable_values}."
end
@status = status
end
# Custom attribute writer method checking allowed values (enum).
# @param [Object] role Object to be assigned
def role=(role)
validator = EnumAttributeValidator.new('String', ["member"])
unless validator.valid?(role)
fail ArgumentError, "invalid value for \"role\", must be one of #{validator.allowable_values}."
end
@role = role
end
# Checks equality by comparing each attribute.
# @param [Object] Object to be compared
def ==(o)
return true if self.equal?(o)
self.class == o.class &&
id == o.id &&
oauth_id == o.oauth_id &&
name == o.name &&
status == o.status &&
links == o.links &&
role == o.role
end
# @see the `==` method
# @param [Object] Object to be compared
def eql?(o)
self == o
end
# Calculates hash code according to all attributes.
# @return [Integer] Hash code
def hash
[id, oauth_id, name, status, links, role, ].hash
end
# Builds the object from hash
# @param [Hash] attributes Model attributes in the form of hash
# @return [Object] Returns the model itself
def self.build_from_hash(attributes)
new.build_from_hash(attributes)
end
# Builds the object from hash
# @param [Hash] attributes Model attributes in the form of hash
# @return [Object] Returns the model itself
def build_from_hash(attributes)
return nil unless attributes.is_a?(Hash)
self.class.openapi_types.each_pair do |key, type|
if attributes[self.class.attribute_map[key]].nil? && self.class.openapi_nullable.include?(key)
self.send("#{key}=", nil)
elsif type =~ /\AArray<(.*)>/i
# check to ensure the input is an array given that the attribute
# is documented as an array but the input is not
if attributes[self.class.attribute_map[key]].is_a?(Array)
self.send("#{key}=", attributes[self.class.attribute_map[key]].map { |v| _deserialize($1, v) })
end
elsif !attributes[self.class.attribute_map[key]].nil?
self.send("#{key}=", _deserialize(type, attributes[self.class.attribute_map[key]]))
end
end
self
end
# Deserializes the data based on type
# @param string type Data type
# @param string value Value to be deserialized
# @return [Object] Deserialized data
def _deserialize(type, value)
case type.to_sym
when :Time
Time.parse(value)
when :Date
Date.parse(value)
when :String
value.to_s
when :Integer
value.to_i
when :Float
value.to_f
when :Boolean
if value.to_s =~ /\A(true|t|yes|y|1)\z/i
true
else
false
end
when :Object
# generic object (usually a Hash), return directly
value
when /\AArray<(?<inner_type>.+)>\z/
inner_type = Regexp.last_match[:inner_type]
value.map { |v| _deserialize(inner_type, v) }
when /\AHash<(?<k_type>.+?), (?<v_type>.+)>\z/
k_type = Regexp.last_match[:k_type]
v_type = Regexp.last_match[:v_type]
{}.tap do |hash|
value.each do |k, v|
hash[_deserialize(k_type, k)] = _deserialize(v_type, v)
end
end
else # model
InfluxDB2::API.const_get(type).build_from_hash(value)
end
end
# Returns the string representation of the object
# @return [String] String presentation of the object
def to_s
to_hash.to_s
end
# to_body is an alias to to_hash (backward compatibility)
# @return [Hash] Returns the object in the form of hash
def to_body
to_hash
end
# Returns the object in the form of hash
# @return [Hash] Returns the object in the form of hash
def to_hash
hash = {}
self.class.attribute_map.each_pair do |attr, param|
value = self.send(attr)
if value.nil?
is_nullable = self.class.openapi_nullable.include?(attr)
next if !is_nullable || (is_nullable && !instance_variable_defined?(:"@#{attr}"))
end
hash[param] = _to_hash(value)
end
hash
end
# Outputs non-array value in the form of hash
# For object, use to_hash. Otherwise, just return the value
# @param [Object] value Any valid value
# @return [Hash] Returns the value in the form of hash
def _to_hash(value)
if value.is_a?(Array)
value.compact.map { |v| _to_hash(v) }
elsif value.is_a?(Hash)
{}.tap do |hash|
value.each { |k, v| hash[k] = _to_hash(v) }
end
elsif value.respond_to? :to_hash
value.to_hash
else
value
end
end
end
end
| 28.865625 | 203 | 0.605608 |
1825bc86b522eaad8698c21b9c45ec7329a9bb8a | 5,008 | #
# Be sure to run `pod spec lint QRCodeScan.podspec' to ensure this is a
# valid spec and to remove all comments including this before submitting the spec.
#
# To learn more about Podspec attributes see http://docs.cocoapods.org/specification.html
# To see working Podspecs in the CocoaPods repo see https://github.com/CocoaPods/Specs/
#
Pod::Spec.new do |s|
# ――― Spec Metadata ―――――――――――――――――――――――――――――――――――――――――――――――――――――――――― #
#
# These will help people to find your library, and whilst it
# can feel like a chore to fill in it's definitely to your advantage. The
# summary should be tweet-length, and the description more in depth.
#
s.name = "QRCodeScan"
s.version = "1.0.1"
s.summary = "QR code scanning"
# This description is used to generate tags and improve search results.
# * Think: What does it do? Why did you write it? What is the focus?
# * Try to keep it short, snappy and to the point.
# * Write the description between the DESC delimiters below.
# * Finally, don't worry about the indent, CocoaPods strips it!
s.description = <<-DESC
DESC
s.homepage = "https://github.com/youmyc/QRCodeScan"
# s.screenshots = "https://github.com/youmyc/QRCodeScan/blob/master/qrscan.gif"
# ――― Spec License ――――――――――――――――――――――――――――――――――――――――――――――――――――――――――― #
#
# Licensing your code is important. See http://choosealicense.com for more info.
# CocoaPods will detect a license file if there is a named LICENSE*
# Popular ones are 'MIT', 'BSD' and 'Apache License, Version 2.0'.
#
s.license = "MIT"
# s.license = { :type => "MIT", :file => "FILE_LICENSE" }
# ――― Author Metadata ――――――――――――――――――――――――――――――――――――――――――――――――――――――――― #
#
# Specify the authors of the library, with email addresses. Email addresses
# of the authors are extracted from the SCM log. E.g. $ git log. CocoaPods also
# accepts just a name if you'd rather not provide an email address.
#
# Specify a social_media_url where others can refer to, for example a twitter
# profile URL.
#
s.author = { "youmyc" => "[email protected]" }
# Or just: s.author = "youmyc"
# s.authors = { "youmyc" => "[email protected]" }
# s.social_media_url = "http://twitter.com/youmyc"
# ――― Platform Specifics ――――――――――――――――――――――――――――――――――――――――――――――――――――――― #
#
# If this Pod runs only on iOS or OS X, then specify the platform and
# the deployment target. You can optionally include the target after the platform.
#
# s.platform = :ios
# s.platform = :ios, "5.0"
# When using multiple platforms
# s.ios.deployment_target = "5.0"
# s.osx.deployment_target = "10.7"
# s.watchos.deployment_target = "2.0"
# s.tvos.deployment_target = "9.0"
# ――― Source Location ―――――――――――――――――――――――――――――――――――――――――――――――――――――――――― #
#
# Specify the location from where the source should be retrieved.
# Supports git, hg, bzr, svn and HTTP.
#
s.source = { :git => "https://github.com/youmyc/QRCodeScan.git", :tag => "#{s.version}" }
# ――― Source Code ―――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――― #
#
# CocoaPods is smart about how it includes source code. For source files
# giving a folder will include any swift, h, m, mm, c & cpp files.
# For header files it will include any header in the folder.
# Not including the public_header_files will make all headers public.
#
s.source_files = "Classes", "Classes/**/*.{h,m}"
s.exclude_files = "Classes/Exclude"
# s.public_header_files = "Classes/**/*.h"
# ――― Resources ―――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――― #
#
# A list of resources included with the Pod. These are copied into the
# target bundle with a build phase script. Anything else will be cleaned.
# You can preserve files from being cleaned, please don't preserve
# non-essential files like tests, examples and documentation.
#
# s.resource = "icon.png"
# s.resources = "Resources/*.png"
# s.preserve_paths = "FilesToSave", "MoreFilesToSave"
# ――― Project Linking ―――――――――――――――――――――――――――――――――――――――――――――――――――――――――― #
#
# Link your library with frameworks, or libraries. Libraries do not include
# the lib prefix of their name.
#
# s.framework = "SomeFramework"
# s.frameworks = "SomeFramework", "AnotherFramework"
# s.library = "iconv"
# s.libraries = "iconv", "xml2"
# ――― Project Settings ――――――――――――――――――――――――――――――――――――――――――――――――――――――――― #
#
# If your library depends on compiler flags you can set them in the xcconfig hash
# where they will only apply to your library. If you depend on other Podspecs
# you can include multiple dependencies to ensure it works.
# s.requires_arc = true
# s.xcconfig = { "HEADER_SEARCH_PATHS" => "$(SDKROOT)/usr/include/libxml2" }
# s.dependency "JSONKit", "~> 1.4"
end
| 36.289855 | 97 | 0.588059 |
28dec342378e2302a7eec847e05d61d180576979 | 24,612 | #!/usr/bin/env rspec
require 'spec_helper'
provider_class = Puppet::Type.type(:augeasprovider).provider(:default)
describe provider_class do
context "empty provider" do
class Empty < provider_class
end
subject { Empty }
describe "#lens" do
it "should fail as default lens isn't set" do
subject.expects(:fail).with('Lens is not provided').raises
expect { subject.lens }.to raise_error
end
end
describe "#target" do
it "should fail if no default or resource file" do
subject.expects(:fail).with('No target file given').raises
expect { subject.target }.to raise_error
end
it "should return resource file if set" do
subject.target(:target => '/foo').should == '/foo'
end
it "should strip trailing / from resource file" do
subject.target(:target => '/foo/').should == '/foo'
end
end
describe "#resource_path" do
it "should call #target if no resource path block set" do
resource = { :name => 'foo' }
subject.expects(:target).with(resource)
subject.resource_path(resource).should == '/foo'
end
it "should call #target if a resource path block is set" do
resource = { :name => 'foo' }
subject.expects(:target).with(resource)
subject.resource_path { '/files/test' }
subject.resource_path(resource).should == '/files/test'
end
end
describe "#readquote" do
it "should return :double when value is double-quoted" do
subject.readquote('"foo"').should == :double
end
it "should return :single when value is single-quoted" do
subject.readquote("'foo'").should == :single
end
it "should return nil when value is not quoted" do
subject.readquote("foo").should be_nil
end
it "should return nil when value is not properly quoted" do
subject.readquote("'foo").should be_nil
subject.readquote("'foo\"").should be_nil
subject.readquote("\"foo").should be_nil
subject.readquote("\"foo'").should be_nil
end
end
describe "#quoteit" do
it "should not do anything by default for alphanum values" do
subject.quoteit('foo').should == 'foo'
end
it "should double-quote by default for values containing spaces or special characters" do
subject.quoteit('foo bar').should == '"foo bar"'
subject.quoteit('foo&bar').should == '"foo&bar"'
subject.quoteit('foo;bar').should == '"foo;bar"'
subject.quoteit('foo<bar').should == '"foo<bar"'
subject.quoteit('foo>bar').should == '"foo>bar"'
subject.quoteit('foo(bar').should == '"foo(bar"'
subject.quoteit('foo)bar').should == '"foo)bar"'
subject.quoteit('foo|bar').should == '"foo|bar"'
end
it "should call #readquote and use its value when oldvalue is passed" do
subject.quoteit('foo', nil, "'bar'").should == "'foo'"
subject.quoteit('foo', nil, '"bar"').should == '"foo"'
subject.quoteit('foo', nil, 'bar').should == 'foo'
subject.quoteit('foo bar', nil, "'bar'").should == "'foo bar'"
end
it "should double-quote special values when oldvalue is not quoted" do
subject.quoteit('foo bar', nil, 'bar').should == '"foo bar"'
end
it "should use the :quoted parameter when present" do
resource = { }
resource.stubs(:parameters).returns([:quoted])
resource[:quoted] = :single
subject.quoteit('foo', resource).should == "'foo'"
resource[:quoted] = :double
subject.quoteit('foo', resource).should == '"foo"'
resource[:quoted] = :auto
subject.quoteit('foo', resource).should == 'foo'
subject.quoteit('foo bar', resource).should == '"foo bar"'
end
end
describe "#unquoteit" do
it "should not do anything when value is not quoted" do
subject.unquoteit('foo bar').should == 'foo bar'
end
it "should not do anything when value is badly quoted" do
subject.unquoteit('"foo bar').should == '"foo bar'
subject.unquoteit("'foo bar").should == "'foo bar"
subject.unquoteit("'foo bar\"").should == "'foo bar\""
end
it "should return unquoted value" do
subject.unquoteit('"foo bar"').should == 'foo bar'
subject.unquoteit("'foo bar'").should == 'foo bar'
end
end
describe "#parsed_as?" do
context "when text_store is supported" do
it "should return false when text_store fails" do
Augeas.any_instance.expects(:respond_to?).with(:text_store).returns(true)
Augeas.any_instance.expects(:set).with('/input', 'foo').returns(nil)
Augeas.any_instance.expects(:text_store).with('Baz.lns', '/input', '/parsed').returns(false)
subject.parsed_as?('foo', 'bar', 'Baz.lns').should == false
end
it "should return false when path is not found" do
Augeas.any_instance.expects(:respond_to?).with(:text_store).returns(true)
Augeas.any_instance.expects(:set).with('/input', 'foo').returns(nil)
Augeas.any_instance.expects(:text_store).with('Baz.lns', '/input', '/parsed').returns(true)
Augeas.any_instance.expects(:match).with('/parsed/bar').returns([])
subject.parsed_as?('foo', 'bar', 'Baz.lns').should == false
end
it "should return true when path is found" do
Augeas.any_instance.expects(:respond_to?).with(:text_store).returns(true)
Augeas.any_instance.expects(:set).with('/input', 'foo').returns(nil)
Augeas.any_instance.expects(:text_store).with('Baz.lns', '/input', '/parsed').returns(true)
Augeas.any_instance.expects(:match).with('/parsed/bar').returns(['/parsed/bar'])
subject.parsed_as?('foo', 'bar', 'Baz.lns').should == true
end
end
context "when text_store is not supported" do
it "should return true if path is found in tempfile" do
Augeas.any_instance.expects(:respond_to?).with(:text_store).returns(false)
Augeas.any_instance.expects(:text_store).never
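        # Without text_store, the provider parses the text through a tempfile, so the match comes back under /files/<tempfile>/...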
Augeas.any_instance.expects(:match).returns(['/files/tmp/aug_text_store20140410-8734-icc4xn/bar'])
subject.parsed_as?('foo', 'bar', 'Baz.lns').should == true
end
end
end
describe "#attr_aug_reader" do
it "should create a class method" do
subject.attr_aug_reader(:foo, {})
subject.method_defined?('attr_aug_reader_foo').should be_true
end
end
describe "#attr_aug_writer" do
it "should create a class method" do
subject.attr_aug_writer(:foo, {})
subject.method_defined?('attr_aug_writer_foo').should be_true
end
end
describe "#attr_aug_accessor" do
it "should call #attr_aug_reader and #attr_aug_writer" do
name = :foo
opts = { :bar => 'baz' }
subject.expects(:attr_aug_reader).with(name, opts)
subject.expects(:attr_aug_writer).with(name, opts)
subject.attr_aug_accessor(name, opts)
end
end
describe "#next_seq" do
it "should return 1 with no paths" do
subject.new.next_seq([]).should == '1'
end
it "should return 1 with only comments" do
subject.new.next_seq(['/files/etc/hosts/#comment[1]']).should == '1'
end
it "should return 2 when 1 exists" do
subject.new.next_seq(['/files/etc/hosts/1']).should == '2'
end
it "should return 42 when 1..41 exists" do
subject.new.next_seq((1..41).map {|n| "/files/etc/hosts/#{n}"}).should == '42'
end
end
end
context "working provider" do
class Test < provider_class
lens { 'Hosts.lns' }
default_file { '/foo' }
resource_path { |r, p| r[:test] }
attr_accessor :resource
end
subject { Test }
let(:tmptarget) { aug_fixture("full") }
let(:thetarget) { tmptarget.path }
let(:resource) { {:target => thetarget} }
# Class methods
describe "#lens" do
it "should allow retrieval of the set lens" do
subject.lens.should == 'Hosts.lns'
end
end
describe "#target" do
it "should allow retrieval of the set default file" do
subject.target.should == '/foo'
end
end
describe "#resource_path" do
it "should call block to get the resource path" do
subject.resource_path(:test => 'bar').should == 'bar'
end
end
describe "#loadpath" do
it "should return nil by default" do
subject.send(:loadpath).should be_nil
end
it "should add libdir/augeas/lenses/ to the loadpath if it exists" do
plugindir = File.join(Puppet[:libdir], 'augeas', 'lenses')
File.expects(:exists?).with(plugindir).returns(true)
subject.send(:loadpath).should == plugindir
end
end
describe "#augopen" do
before do
subject.expects(:augsave!).never
end
context "on Puppet < 3.4.0" do
before :each do
subject.stubs(:supported?).with(:post_resource_eval).returns(false)
end
it "should call Augeas#close when given a block" do
subject.augopen(resource) do |aug|
aug.expects(:close)
end
end
it "should not call Augeas#close when not given a block" do
Augeas.any_instance.expects(:close).never
aug = subject.augopen(resource)
end
end
context "on Puppet >= 3.4.0" do
before :each do
subject.stubs(:supported?).with(:post_resource_eval).returns(true)
end
it "should not call Augeas#close when given a block" do
Augeas.any_instance.expects(:close).never
aug = subject.augopen(resource)
end
it "should not call Augeas#close when not given a block" do
Augeas.any_instance.expects(:close).never
aug = subject.augopen(resource)
end
it "should call Augeas#close when calling post_resource_eval" do
subject.augopen(resource) do |aug|
aug.expects(:close)
subject.post_resource_eval
end
end
end
it "should call #setvars when given a block" do
subject.expects(:setvars)
subject.augopen(resource) { |aug| }
end
it "should not call #setvars when not given a block" do
subject.expects(:setvars).never
aug = subject.augopen(resource)
end
context "with broken file" do
let(:tmptarget) { aug_fixture("broken") }
it "should fail if the file fails to load" do
subject.expects(:fail).with(regexp_matches(/Augeas didn't load #{Regexp.escape(thetarget)} with Hosts.lns: Iterated lens matched less than it should/)).raises
expect { subject.augopen(resource) {} }.to raise_error
end
end
end
describe "#augopen!" do
context "on Puppet < 3.4.0" do
before :each do
subject.stubs(:supported?).with(:post_resource_eval).returns(false)
end
it "should call Augeas#close when given a block" do
subject.augopen!(resource) do |aug|
aug.expects(:close)
end
end
it "should not call Augeas#close when not given a block" do
Augeas.any_instance.expects(:close).never
aug = subject.augopen!(resource)
end
end
context "on Puppet >= 3.4.0" do
before :each do
subject.stubs(:supported?).with(:post_resource_eval).returns(true)
end
it "should not call Augeas#close when given a block" do
Augeas.any_instance.expects(:close).never
aug = subject.augopen!(resource)
end
it "should not call Augeas#close when not given a block" do
Augeas.any_instance.expects(:close).never
aug = subject.augopen!(resource)
end
end
it "should call #setvars when given a block" do
subject.expects(:setvars)
subject.augopen!(resource) { |aug| }
end
it "should not call #setvars when not given a block" do
subject.expects(:setvars).never
aug = subject.augopen!(resource)
end
context "on Puppet < 3.4.0" do
before :each do
subject.stubs(:supported?).with(:post_resource_eval).returns(false)
end
it "should call #augsave when given a block" do
subject.expects(:augsave!)
subject.augopen!(resource) { |aug| }
end
it "should not call #augsave when not given a block" do
subject.expects(:augsave!).never
aug = subject.augopen!(resource)
end
end
context "on Puppet >= 3.4.0" do
before :each do
subject.stubs(:supported?).with(:post_resource_eval).returns(true)
end
it "should not call #augsave when given a block" do
subject.expects(:augsave!).never
subject.augopen!(resource) { |aug| }
end
it "should not call #augsave when not given a block" do
subject.expects(:augsave!).never
aug = subject.augopen!(resource)
end
it "should call Augeas#close when calling post_resource_eval" do
subject.augopen(resource) do |aug|
aug.expects(:close)
subject.post_resource_eval
end
end
end
context "with broken file" do
let(:tmptarget) { aug_fixture("broken") }
it "should fail if the file fails to load" do
subject.expects(:fail).with(regexp_matches(/Augeas didn't load #{Regexp.escape(thetarget)} with Hosts.lns: Iterated lens matched less than it should/)).raises
expect { subject.augopen!(resource) {} }.to raise_error
end
end
context "when raising an exception in the block" do
it "should to raise the right exception" do
expect {
subject.augopen! do |aug|
raise Puppet::Error, "My error"
end
}.to raise_error Puppet::Error, "My error"
end
end
end
describe "#augsave" do
it "should print /augeas//error on save" do
subject.augopen(resource) do |aug|
# Prepare an invalid save
subject.stubs(:debug)
aug.rm("/files#{thetarget}/*/ipaddr").should_not == 0
lambda { subject.augsave!(aug) }.should raise_error Augeas::Error, /Failed to save Augeas tree/
end
end
end
describe "#path_label" do
it "should use Augeas#label when available" do
subject.augopen(resource) do |aug|
aug.expects(:respond_to?).with(:label).returns true
aug.expects(:label).with('/files/foo[2]').returns 'foo'
subject.path_label(aug, '/files/foo[2]').should == 'foo'
end
end
it "should emulate Augeas#label when it is not available" do
subject.augopen(resource) do |aug|
aug.expects(:respond_to?).with(:label).returns false
aug.expects(:label).with('/files/bar[4]').never
subject.path_label(aug, '/files/bar[4]').should == 'bar'
end
end
it "should emulate Augeas#label when no label is found in the tree" do
subject.augopen(resource) do |aug|
aug.expects(:respond_to?).with(:label).returns true
aug.expects(:label).with('/files/baz[15]').returns nil
subject.path_label(aug, '/files/baz[15]').should == 'baz'
end
end
end
describe "#setvars" do
it "should call Augeas#defnode to set $target, Augeas#defvar to set $resource and Augeas#set to set /augeas/context when resource is passed" do
subject.augopen(resource) do |aug|
aug.expects(:set).with('/augeas/context', "/files#{thetarget}")
aug.expects(:defnode).with('target', "/files#{thetarget}", nil)
subject.expects(:resource_path).with(resource).returns('/files/foo')
aug.expects(:defvar).with('resource', '/files/foo')
subject.setvars(aug, resource)
end
end
it "should call Augeas#defnode to set $target but not $resource when no resource is passed" do
subject.augopen(resource) do |aug|
aug.expects(:defnode).with('target', '/files/foo', nil)
aug.expects(:defvar).never
subject.setvars(aug)
end
end
end
describe "#attr_aug_reader" do
it "should create a class method using :string" do
subject.attr_aug_reader(:foo, {})
subject.method_defined?('attr_aug_reader_foo').should be_true
Augeas.any_instance.expects(:get).with('$resource/foo').returns('bar')
subject.augopen(resource) do |aug|
subject.attr_aug_reader_foo(aug).should == 'bar'
end
end
it "should create a class method using :array and no sublabel" do
subject.attr_aug_reader(:foo, { :type => :array })
subject.method_defined?('attr_aug_reader_foo').should be_true
rpath = "/files#{thetarget}/test/foo"
subject.augopen(resource) do |aug|
aug.expects(:match).with('$resource/foo').returns(["#{rpath}[1]", "#{rpath}[2]"])
aug.expects(:get).with("#{rpath}[1]").returns('baz')
aug.expects(:get).with("#{rpath}[2]").returns('bazz')
subject.attr_aug_reader_foo(aug).should == ['baz', 'bazz']
end
end
it "should create a class method using :array and a :seq sublabel" do
subject.attr_aug_reader(:foo, { :type => :array, :sublabel => :seq })
subject.method_defined?('attr_aug_reader_foo').should be_true
rpath = "/files#{thetarget}/test/foo"
subject.augopen(resource) do |aug|
aug.expects(:match).with('$resource/foo').returns(["#{rpath}[1]", "#{rpath}[2]"])
aug.expects(:match).with("#{rpath}[1]/*[label()=~regexp('[0-9]+')]").returns(["#{rpath}[1]/1"])
aug.expects(:get).with("#{rpath}[1]/1").returns('val11')
aug.expects(:match).with("#{rpath}[2]/*[label()=~regexp('[0-9]+')]").returns(["#{rpath}[2]/1", "#{rpath}[2]/2"])
aug.expects(:get).with("#{rpath}[2]/1").returns('val21')
aug.expects(:get).with("#{rpath}[2]/2").returns('val22')
subject.attr_aug_reader_foo(aug).should == ['val11', 'val21', 'val22']
end
end
it "should create a class method using :array and a string sublabel" do
subject.attr_aug_reader(:foo, { :type => :array, :sublabel => 'sl' })
subject.method_defined?('attr_aug_reader_foo').should be_true
rpath = "/files#{thetarget}/test/foo"
subject.augopen(resource) do |aug|
aug.expects(:match).with('$resource/foo').returns(["#{rpath}[1]", "#{rpath}[2]"])
aug.expects(:match).with("#{rpath}[1]/sl").returns(["#{rpath}[1]/sl"])
aug.expects(:get).with("#{rpath}[1]/sl").returns('val11')
aug.expects(:match).with("#{rpath}[2]/sl").returns(["#{rpath}[2]/sl[1]", "#{rpath}[2]/sl[2]"])
aug.expects(:get).with("#{rpath}[2]/sl[1]").returns('val21')
aug.expects(:get).with("#{rpath}[2]/sl[2]").returns('val22')
subject.attr_aug_reader_foo(aug).should == ['val11', 'val21', 'val22']
end
end
it "should create a class method using :hash and no sublabel" do
expect {
subject.attr_aug_reader(:foo, { :type => :hash, :default => 'deflt' })
}.to raise_error(RuntimeError, /You must provide a sublabel/)
end
it "should create a class method using :hash and sublabel" do
subject.attr_aug_reader(:foo, { :type => :hash, :sublabel => 'sl', :default => 'deflt' })
subject.method_defined?('attr_aug_reader_foo').should be_true
rpath = "/files#{thetarget}/test/foo"
subject.augopen(resource) do |aug|
aug.expects(:match).with('$resource/foo').returns(["#{rpath}[1]", "#{rpath}[2]"])
aug.expects(:get).with("#{rpath}[1]").returns('baz')
aug.expects(:get).with("#{rpath}[1]/sl").returns('bazval')
aug.expects(:get).with("#{rpath}[2]").returns('bazz')
aug.expects(:get).with("#{rpath}[2]/sl").returns(nil)
subject.attr_aug_reader_foo(aug).should == { 'baz' => 'bazval', 'bazz' => 'deflt' }
end
end
it "should create a class method using wrong type" do
expect {
subject.attr_aug_reader(:foo, { :type => :foo })
}.to raise_error(RuntimeError, /Invalid type: foo/)
end
end
describe "#attr_aug_writer" do
it "should create a class method using :string" do
subject.attr_aug_writer(:foo, {})
subject.method_defined?('attr_aug_writer_foo').should be_true
subject.augopen(resource) do |aug|
aug.expects(:set).with('$resource/foo', 'bar')
subject.attr_aug_writer_foo(aug, 'bar')
aug.expects(:clear).with('$resource/foo')
subject.attr_aug_writer_foo(aug)
end
end
it "should create a class method using :string with :rm_node" do
subject.attr_aug_writer(:foo, { :rm_node => true })
subject.method_defined?('attr_aug_writer_foo').should be_true
subject.augopen(resource) do |aug|
aug.expects(:set).with('$resource/foo', 'bar')
subject.attr_aug_writer_foo(aug, 'bar')
aug.expects(:rm).with('$resource/foo')
subject.attr_aug_writer_foo(aug)
end
end
it "should create a class method using :array and no sublabel" do
subject.attr_aug_writer(:foo, { :type => :array })
subject.method_defined?('attr_aug_writer_foo').should be_true
subject.augopen(resource) do |aug|
aug.expects(:rm).with('$resource/foo')
aug.expects(:set).with('$resource/foo[1]', 'bar')
subject.attr_aug_writer_foo(aug)
aug.expects(:rm).with('$resource/foo')
aug.expects(:set).with('$resource/foo[2]', 'baz')
subject.attr_aug_writer_foo(aug, ['bar', 'baz'])
end
end
it "should create a class method using :array and a :seq sublabel" do
subject.attr_aug_writer(:foo, { :type => :array, :sublabel => :seq })
subject.method_defined?('attr_aug_writer_foo').should be_true
subject.augopen(resource) do |aug|
aug.expects(:rm).with('$resource/foo')
subject.attr_aug_writer_foo(aug)
aug.expects(:rm).with("$resource/foo/*[label()=~regexp('[0-9]+')]")
aug.expects(:set).with('$resource/foo/1', 'bar')
aug.expects(:set).with('$resource/foo/2', 'baz')
subject.attr_aug_writer_foo(aug, ['bar', 'baz'])
end
end
it "should create a class method using :array and a string sublabel" do
subject.attr_aug_writer(:foo, { :type => :array, :sublabel => 'sl' })
subject.method_defined?('attr_aug_writer_foo').should be_true
subject.augopen(resource) do |aug|
aug.expects(:rm).with('$resource/foo')
subject.attr_aug_writer_foo(aug)
aug.expects(:rm).with('$resource/foo/sl')
aug.expects(:set).with('$resource/foo/sl[1]', 'bar')
aug.expects(:set).with('$resource/foo/sl[2]', 'baz')
subject.attr_aug_writer_foo(aug, ['bar', 'baz'])
end
end
it "should create a class method using :hash and no sublabel" do
expect {
subject.attr_aug_writer(:foo, { :type => :hash, :default => 'deflt' })
}.to raise_error(RuntimeError, /You must provide a sublabel/)
end
it "should create a class method using :hash and sublabel" do
subject.attr_aug_writer(:foo, { :type => :hash, :sublabel => 'sl', :default => 'deflt' })
subject.method_defined?('attr_aug_writer_foo').should be_true
rpath = "/files#{thetarget}/test/foo"
subject.augopen(resource) do |aug|
aug.expects(:rm).with('$resource/foo')
aug.expects(:set).with("$resource/foo[.='baz']", 'baz')
aug.expects(:set).with("$resource/foo[.='baz']/sl", 'bazval')
aug.expects(:set).with("$resource/foo[.='bazz']", 'bazz')
aug.expects(:set).with("$resource/foo[.='bazz']/sl", 'bazzval').never
subject.attr_aug_writer_foo(aug, { 'baz' => 'bazval', 'bazz' => 'deflt' })
end
end
it "should create a class method using wrong type" do
expect {
subject.attr_aug_writer(:foo, { :type => :foo })
}.to raise_error(RuntimeError, /Invalid type: foo/)
end
end
end
end
| 37.290909 | 168 | 0.602105 |
f77a075990fd945cfddf75af71e209c8176bed0d | 8,673 | module CanvasCsv
# Updates users currently present within Canvas.
# Used by CanvasCsv::RefreshAllCampusData to maintain officially enrolled students/faculty
# See CanvasCsv::AddNewUsers for maintenance of new active CalNet users within Canvas
class MaintainUsers < Base
include ClassLogger
attr_accessor :sis_user_id_changes, :user_email_deletions
# Returns true if user hashes are identical
def self.provisioned_account_eq_sis_account?(provisioned_account, sis_account)
# Canvas interprets an empty 'email' column as 'Do not change.'
matched = provisioned_account['login_id'] == sis_account['login_id'] &&
(sis_account['email'].blank? || (provisioned_account['email'] == sis_account['email']))
if matched && Settings.canvas_proxy.maintain_user_names
# Canvas plays elaborate games with user name imports. See the RSpec for examples.
matched = provisioned_account['full_name'] == "#{sis_account['first_name']} #{sis_account['last_name']}"
end
matched
end
# Updates SIS User ID for Canvas User
#
# Because there is no way to do a bulk download of user login objects, two Canvas requests are required to
# set each user's SIS user ID.
def self.change_sis_user_id(canvas_user_id, new_sis_user_id)
logins_proxy = Canvas::Logins.new
response = logins_proxy.user_logins(canvas_user_id)
if (user_logins = response[:body])
# We look for the login with a numeric "unique_id", and assume it is an LDAP UID.
user_logins.select! do |login|
parse_login_id(login['unique_id'])[:ldap_uid]
end
if user_logins.length > 1
logger.error "Multiple numeric logins found for Canvas user #{canvas_user_id}; will skip"
elsif user_logins.empty?
logger.warn "No LDAP UID login found for Canvas user #{canvas_user_id}; will skip"
else
login_object_id = user_logins[0]['id']
logger.debug "Changing SIS ID for user #{canvas_user_id} to #{new_sis_user_id}"
response = logins_proxy.change_sis_user_id(login_object_id, new_sis_user_id)
return true if response[:statusCode] == 200
end
end
false
end
def self.parse_login_id(login_id)
if (matched = /^(inactive-)?([0-9]+)$/.match login_id)
inactive_account = matched[1]
ldap_uid = matched[2].to_i
end
{
ldap_uid: ldap_uid,
inactive_account: inactive_account.present?
}
end
def initialize(known_users, sis_user_import_csv)
super()
@known_users = known_users
@user_import_csv = sis_user_import_csv
@sis_user_id_changes = {}
@user_email_deletions = []
end
# Appends account changes to the given CSV.
# Appends all known user IDs to the input array.
# Makes any necessary changes to SIS user IDs.
def refresh_existing_user_accounts
check_all_user_accounts
handle_changed_sis_user_ids
if Settings.canvas_proxy.delete_bad_emails.present?
handle_email_deletions @user_email_deletions
else
logger.warn "EMAIL DELETION BLOCKED: Would delete email addresses for #{@user_email_deletions.length} inactive users: #{@user_email_deletions}"
end
end
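    # Downloads the full provisioned-users report from Canvas and checks it against campus data in batches of 1000 rows.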
def check_all_user_accounts
users_csv_file = "#{Settings.canvas_proxy.export_directory}/provisioned-users-#{DateTime.now.strftime('%F-%H-%M')}.csv"
users_csv_file = Canvas::Report::Users.new(download_to_file: users_csv_file).get_csv
if users_csv_file.present?
accounts_batch = []
CSV.foreach(users_csv_file, headers: true) do |account_row|
accounts_batch << account_row
if accounts_batch.length == 1000
compare_to_campus(accounts_batch)
accounts_batch = []
end
end
compare_to_campus(accounts_batch) if accounts_batch.present?
end
end
# Any changes to SIS user IDs must take effect before the enrollments CSV is generated.
# Otherwise, the generated CSV may include a new ID that does not match the existing ID for a user account.
def handle_changed_sis_user_ids
if Settings.canvas_proxy.dry_run_import.present?
logger.warn "DRY RUN MODE: Would change #{@sis_user_id_changes.length} SIS user IDs #{@sis_user_id_changes.inspect}"
else
logger.warn "About to change #{@sis_user_id_changes.length} SIS user IDs"
@sis_user_id_changes.each do |canvas_user_id, new_sis_id|
succeeded = self.class.change_sis_user_id(canvas_user_id, new_sis_id)
unless succeeded
# If we had ideal data sources, it would be prudent to remove any mention of the no-longer-going-to-be-changed
# SIS User ID from the import CSVs. However, the failure was likely triggered by Canvas's inconsistent
# handling of deleted records, with a deleted user login being completely invisible and yet still capable
# of blocking new records. The only way to make the deleted record available for inspection and clean-up is
# to go on with the import.
logger.error "Canvas user #{canvas_user_id} did not successfully have its SIS ID changed to #{new_sis_id}! Check for duplicated LDAP UIDs in bCourses."
end
end
end
end
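    # Deletes all email communication channels for the given Canvas user IDs (only logs them when dry-run mode is on).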
def handle_email_deletions(canvas_user_ids)
logger.warn "About to delete email addresses for #{canvas_user_ids.length} inactive users: #{canvas_user_ids}"
canvas_user_ids.each do |canvas_user_id|
proxy = Canvas::CommunicationChannels.new(canvas_user_id: canvas_user_id)
if (channels = proxy.list[:body])
channels.each do |channel|
if channel['type'] == 'email'
channel_id = channel['id']
dry_run = Settings.canvas_proxy.dry_run_import
if dry_run.present?
logger.warn "DRY RUN MODE: Would delete communication channel #{channel}"
else
logger.warn "Deleting communication channel #{channel}"
proxy.delete channel_id
end
end
end
end
end
end
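    # Compares a single provisioned Canvas account row against campus attributes and queues any needed update, inactivation or reactivation.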
def categorize_user_account(existing_account, campus_user_attributes)
# Convert from CSV::Row for easier manipulation.
old_account_data = existing_account.to_hash
parsed_login_id = self.class.parse_login_id old_account_data['login_id']
ldap_uid = parsed_login_id[:ldap_uid]
inactive_account = parsed_login_id[:inactive_account]
if ldap_uid
campus_user = campus_user_attributes.select { |r| (r[:ldap_uid].to_i == ldap_uid) && !r[:roles][:expiredAccount] }.first
if campus_user.present?
logger.warn "Reactivating account for LDAP UID #{ldap_uid}" if inactive_account
new_account_data = canvas_user_from_campus_attributes campus_user
@known_users[ldap_uid.to_s] = new_account_data['user_id']
else
if Settings.canvas_proxy.inactivate_expired_users
# This LDAP UID no longer appears in campus data. Mark the Canvas user account as inactive.
logger.warn "Inactivating account for LDAP UID #{ldap_uid}" unless inactive_account
if old_account_data['email'].present?
@user_email_deletions << old_account_data['canvas_user_id']
end
new_account_data = old_account_data.merge(
'login_id' => "inactive-#{ldap_uid}",
'user_id' => "UID:#{ldap_uid}",
'email' => nil
)
@known_users[ldap_uid.to_s] = new_account_data['user_id']
else
@known_users[ldap_uid.to_s] = old_account_data['user_id']
return
end
end
if old_account_data['user_id'] != new_account_data['user_id']
logger.warn "Will change SIS ID for user sis_login_id:#{old_account_data['login_id']} from #{old_account_data['user_id']} to #{new_account_data['user_id']}"
@sis_user_id_changes["sis_login_id:#{old_account_data['login_id']}"] = new_account_data['user_id']
end
unless self.class.provisioned_account_eq_sis_account?(old_account_data, new_account_data)
@user_import_csv << new_account_data
end
end
end
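    # Looks up campus attributes for a batch of provisioned account rows and categorizes each one.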
def compare_to_campus(accounts_batch)
campus_user_rows = User::BasicAttributes.attributes_for_uids(accounts_batch.collect do |r|
r['login_id'].to_s.gsub(/^inactive-/, '')
end
)
accounts_batch.each do |existing_account|
categorize_user_account(existing_account, campus_user_rows)
end
end
end
end
| 45.888889 | 166 | 0.677044 |
e2cdf3108f7ce00ba7b1ae13a1a9b46caa013b96 | 253 | Deface::Override.new(:virtual_path => 'spree/orders/_line_item',
:name => "add_id_to_tr_line_item",
:set_attributes => 'tr',
:attributes => {:id => 'line-item-<%= line_item.variant_id %>'})
| 50.6 | 85 | 0.517787 |
edd2b78df1bc9ecc4d57425374a5f64f8e353cd3 | 90 | $LOAD_PATH.unshift File.expand_path('../../lib', __FILE__)
require 'jquery_image_gallery'
| 30 | 58 | 0.766667 |
4ada9df652aeb27d945ec8485e000e4bb3507b18 | 501 | class SuppliesController < ApplicationController
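  # Bare-bones actions for Supply records; new renders without the application layout.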
def new
render :new, :layout => false
end
def show
@supply = Supply.find(params[:id])
end
def edit
@supply = Supply.find(params[:id])
end
def destroy
@supply = Supply.find(params[:id])
@supply.destroy
redirect_to home_path
end
def update
@supply = Supply.find(params[:id])
@supply.update(supply_params)
redirect_to supply_path(@supply)
end
  private
  # Strong parameters: only :title and :price may be mass-assigned
  def supply_params
    params.require(:supply).permit(:title, :price)
  end
end
| 15.65625 | 48 | 0.712575 |
d5d86a14181056797626e975325329888086340d | 1,296 | cask "brave-browser-beta" do
version "91.1.26.60,126.60"
if Hardware::CPU.intel?
sha256 "1939eb8438b1ae1d1ea90458c570b047220ae166c50048d75a5c3cfff049bfac"
url "https://updates-cdn.bravesoftware.com/sparkle/Brave-Browser/beta/#{version.after_comma}/Brave-Browser-Beta-x64.dmg",
verified: "updates-cdn.bravesoftware.com/sparkle/Brave-Browser/"
livecheck do
url "https://updates.bravesoftware.com/sparkle/Brave-Browser/beta/appcast.xml"
strategy :sparkle
end
else
sha256 "0d88eb9fd95fa46c96667feedc6b7fd6ad2dac60ffb570a284474802ae2dd6a9"
url "https://updates-cdn.bravesoftware.com/sparkle/Brave-Browser/beta-arm64/#{version.after_comma}/Brave-Browser-Beta-universal.dmg",
verified: "updates-cdn.bravesoftware.com/sparkle/Brave-Browser/"
livecheck do
url "https://updates.bravesoftware.com/sparkle/Brave-Browser/beta-arm64/appcast.xml"
strategy :sparkle
end
end
name "Brave Beta"
desc "Web browser focusing on privacy"
homepage "https://brave.com/download-beta/"
auto_updates true
app "Brave Browser Beta.app"
zap trash: [
"~/Library/Application Support/brave",
"~/Library/Preferences/com.electron.brave.plist",
"~/Library/Saved Application State/com.electron.brave.savedState",
]
end
| 32.4 | 137 | 0.738426 |