hexsha (string, 40 chars) | size (int64, 2 to 1.01M) | content (string, 2 to 1.01M) | avg_line_length (float64, 1.5 to 100) | max_line_length (int64, 2 to 1k) | alphanum_fraction (float64, 0.25 to 1) |
---|---|---|---|---|---|
aba6e84b94e7b240ad56c43295a5a12a22abf45c | 1,869 | class Vimpager < Formula
desc "Use ViM as PAGER"
homepage "https://github.com/rkitover/vimpager"
url "https://github.com/rkitover/vimpager/archive/2.06.tar.gz"
sha256 "cc616d0840a6f2501704eea70de222ab662421f34b2da307e11fb62aa70bda5d"
head "https://github.com/rkitover/vimpager.git"
bottle do
sha256 cellar: :any_skip_relocation, arm64_big_sur: "356c5407fc0656ac0b4ae4a7a3e62f1992525a120a7a1d0b6fdd514561d5381d"
sha256 cellar: :any_skip_relocation, big_sur: "9ead831c50c50e1b95b18695737936bc907c9241867d3700c87120c8af09aea7"
sha256 cellar: :any_skip_relocation, catalina: "2a409da1fc4a31e1165e33ed681ed15b874d514721c7295a0901ebf4516aa469"
sha256 cellar: :any_skip_relocation, mojave: "f4ec02de4d30af041e98f3ab4ce6344424f7a8f5bfeca6cf21dc179cbd6e576a"
sha256 cellar: :any_skip_relocation, high_sierra: "73aaa39c9876664b2f0b0a98dea30ea34e05504f28d607276873345d57b97834"
sha256 cellar: :any_skip_relocation, sierra: "308c68e761983beb317bbefcba285022dbc74a66486a3da7e2ac8bc929649a3a"
sha256 cellar: :any_skip_relocation, el_capitan: "eccfe695299ff91b489e0385b2024e6f383426f696dc4a5462fe2e0bc6f875b1"
sha256 cellar: :any_skip_relocation, yosemite: "be8ae8e77106e1fa95821b59171b982af74365693be0b416e41bb807a07c6c60"
sha256 cellar: :any_skip_relocation, x86_64_linux: "30604f02c981295f1cd967cb1ecf5a2afd3c4c51f5d751383c78eafb085e0f10" # glibc 2.19
end
depends_on "pandoc" => :build
def install
system "make", "install", "PREFIX=#{prefix}"
system "make", "docs"
end
def caveats
<<~EOS
To use vimpager as your default pager, add `export PAGER=vimpager` to your
shell configuration.
EOS
end
test do
(testpath/"test.txt").write <<~EOS
This is test
EOS
assert_match(/This is test/, shell_output("#{bin}/vimcat test.txt"))
end
end
| 44.5 | 135 | 0.779026 |
337ca88a3325365dcc7e02708c8cbc1f4c2933cd | 1,183 | # rails_admin-1.0 ready
require 'rails_admin/adapters/mongoid'
module RailsAdmin
module Adapters
module Mongoid
StatementBuilder.class_eval do
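        # Redefines build_statement_for_type: dispatch on @type to the matching
        # statement helper, falling back to a text-style search for custom types
        # that subclass RailsAdmin::Config::Fields::Types::Text.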
def build_statement_for_type
case @type
when :boolean
build_statement_for_boolean
when :integer, :decimal, :float
build_statement_for_integer_decimal_or_float
          when :string, :text, :enum_edit # it is convenient to search enum_edit fields as strings
build_statement_for_string_or_text
when :enum
build_statement_for_enum
when :belongs_to_association, :bson_object_id
build_statement_for_belongs_to_association_or_bson_object_id
when :json_value
column_for_value(@value)
else
begin
if RailsAdmin::Config::Fields::Types.load(@type) < RailsAdmin::Config::Fields::Types::Text
build_statement_for_string_or_text
else
nil # TODO Build statements for other custom rails_admin types
end
rescue
nil
end
end
end
end
end
end
end
| 31.131579 | 104 | 0.617075 |
26021e7154e0193e06a57d877f4b732b39fde7c0 | 569 | class Object
define_method :singleton_class, instance_method(:metaclass) unless respond_to?(:singleton_class)
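  # Serializes an arbitrary object to a BSON-friendly hash: the class name is kept
  # under MongoDoc::BSON::CLASS_KEY and each instance variable (minus its leading "@")
  # becomes a key whose value is converted recursively via #to_bson.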
def to_bson(*args)
{MongoDoc::BSON::CLASS_KEY => self.class.name}.tap do |bson_hash|
instance_variables.each do |name|
bson_hash[name[1..-1]] = instance_variable_get(name).to_bson(args)
end
end
end
def self.bson_create(bson_hash, options = {})
allocate.tap do |obj|
bson_hash.each do |name, value|
obj.instance_variable_set("@#{name}", MongoDoc::BSON.decode(value, options))
end
end
end
end
| 28.45 | 98 | 0.681898 |
bbf9eeccc3ae868c1a970e56e5a1d174bba8ef3e | 160 | class Docear < Cask
version 'latest'
sha256 :no_check
url 'http://docear.org/download/docear.dmg'
homepage 'http://docear.org'
app 'Docear.app'
end
| 16 | 45 | 0.69375 |
01c7079cb0ed2189416deccd1b055b633f21e4eb | 1,210 | # encoding: UTF-8
module CSKit
module Readers
Reading = Struct.new(:texts, :citation, :chapter, :verse) do
def params
@params || {}
end
def to_annotated_reading
AnnotatedReading.new(texts, citation, chapter, verse)
end
def annotated?
false
end
def to_hash
{
texts: texts,
chapter: chapter.to_hash
}
end
end
class AnnotatedReading < Reading
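      # Registers an annotation for the text at +text_index+, first removing any
      # existing annotation that spans the same start/finish range.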
def add_annotation(text_index, annotation)
if idx = index(text_index, annotation)
annotations[text_index].delete_at(idx)
end
(annotations[text_index] ||= []) << annotation
end
def annotations_at(index)
annotations[index]
end
def annotations
@annotations ||= {}
end
def annotated?
true
end
private
def index(text_index, annotation_to_find)
(annotations[text_index] || []).each_with_index do |annotation, index|
if annotation.start == annotation_to_find.start && annotation.finish == annotation_to_find.finish
return index
end
end
nil
end
end
end
end
| 19.516129 | 107 | 0.57438 |
11534dd596ed47f2deeb992f9240b3bc0ea43ffe | 131 | require 'test_helper'
class McqsControllerTest < ActionController::TestCase
# test "the truth" do
# assert true
# end
end
| 16.375 | 53 | 0.725191 |
bf57c3db9f53688655f1dfe5081c20222d88cc28 | 427 | module Sass
module Tree
class DebugNode < Node
def initialize(expr, options)
@expr = expr
super(options)
end
protected
def _perform(environment)
res = @expr.perform(environment)
if filename
STDERR.puts "#{filename}:#{line} DEBUG: #{res}"
else
STDERR.puts "Line #{line} DEBUG: #{res}"
end
[]
end
end
end
end
| 18.565217 | 57 | 0.529274 |
2152f4df8254a19aefa0c46708b8bf2b454b03ac | 158 | # frozen_string_literal: true
class CreateGroupLessonSummaries < ActiveRecord::Migration[5.2]
def change
create_view :group_lesson_summaries
end
end
| 19.75 | 63 | 0.803797 |
01770c728fac924502780b465eec107d26cd56eb | 926 | require 'tweetstream/configuration'
require 'tweetstream/client'
require 'tweetstream/daemon'
module TweetStream
extend Configuration
class ReconnectError < StandardError
attr_accessor :timeout, :retries
def initialize(timeout, retries)
self.timeout = timeout
self.retries = retries
super("Failed to reconnect after #{retries} tries.")
end
end
class << self
# Alias for TweetStream::Client.new
#
# @return [TweetStream::Client]
def new(options = {})
TweetStream::Client.new(options)
end
# Delegate to TweetStream::Client
def method_missing(method, *args, &block)
return super unless new.respond_to?(method)
new.send(method, *args, &block)
end
# Delegate to TweetStream::Client
def respond_to?(method, include_private = false)
new.respond_to?(method, include_private) || super(method, include_private)
end
end
end
| 25.027027 | 80 | 0.691145 |
7a76b3f390667ce4aadb4805e8bb5b76ae1dd6d8 | 9,476 | class Electrum < Formula
include Language::Python::Virtualenv
desc "Bitcoin thin client"
homepage "https://electrum.org"
url "https://download.electrum.org/3.3.8/Electrum-3.3.8.tar.gz"
sha256 "e2adf191847609d5bd850320f647db6347952b170364a463276db27a836400bc"
revision 1
bottle do
root_url "https://dl.bintray.com/domt4/crypto-bottles"
cellar :any_skip_relocation
sha256 "df0a4fcc55e0d91ba3eb0deba3e381bb9a3cea185daf5ff35a5b23ca1391026d" => :mojave
end
depends_on "protobuf"
depends_on "pyqt"
depends_on "python"
resource "Cython" do
url "https://files.pythonhosted.org/packages/5b/5b/6cba7123a089c4174f944dd05ea7984c8d908aba8746a99f2340dde8662f/Cython-0.29.12.tar.gz"
sha256 "20da832a5e9a8e93d1e1eb64650258956723940968eb585506531719b55b804f"
end
resource "QDarkStyle" do
url "https://files.pythonhosted.org/packages/a4/ec/c6ae1509370f07ef2ac725cfed6add23b94670a5903a834a463440ca295a/QDarkStyle-2.6.8.tar.gz"
sha256 "037a54bf0aa5153f8055b65b8b36ac0d0f7648f2fd906c011a4da22eb0f582a2"
end
resource "aiohttp" do
url "https://files.pythonhosted.org/packages/0f/58/c8b83f999da3b13e66249ea32f325be923791c0c10aee6cf16002a3effc1/aiohttp-3.5.4.tar.gz"
sha256 "9c4c83f4fa1938377da32bc2d59379025ceeee8e24b89f72fcbccd8ca22dc9bf"
end
resource "aiohttp-socks" do
url "https://files.pythonhosted.org/packages/c2/78/3cf7de8bcb047e1969d6b49d7ea50ef4d8254b3f1512721e425ad94ec1c0/aiohttp_socks-0.2.2.tar.gz"
sha256 "eebd8939a7c3c1e3e7e1b2552c60039b4c65ef6b8b2351efcbdd98290538e310"
end
resource "aiorpcX" do
url "https://files.pythonhosted.org/packages/4e/c5/9aacc2f50e06919c3f1d34137d1acea980f2449df7c4f9149c37b306e492/aiorpcX-0.18.3.tar.gz"
sha256 "b7a7ced5df95c79c74f7834e7cc58bb7747dbad9eb37bf7580da507e182ca44c"
end
resource "async_timeout" do
url "https://files.pythonhosted.org/packages/a1/78/aae1545aba6e87e23ecab8d212b58bb70e72164b67eb090b81bb17ad38e3/async-timeout-3.0.1.tar.gz"
sha256 "0c3c816a028d47f659d6ff5c745cb2acf1f966da1fe5c19c77a70282b25f4c5f"
end
resource "attrs" do
url "https://files.pythonhosted.org/packages/cc/d9/931a24cc5394f19383fbbe3e1147a0291276afa43a0dc3ed0d6cd9fda813/attrs-19.1.0.tar.gz"
sha256 "f0b870f674851ecbfbbbd364d6b5cbdff9dcedbc7f3f5e18a6891057f21fe399"
end
resource "btchip-python" do
url "https://files.pythonhosted.org/packages/66/26/52b88daa03da39cc8d7178f945af0dcbba46d642ced6ea56e305762568ae/btchip-python-0.1.28.tar.gz"
sha256 "da09d0d7a6180d428833795ea9a233c3b317ddfcccea8cc6f0eba59435e5dd83"
end
resource "certifi" do
url "https://files.pythonhosted.org/packages/c5/67/5d0548226bcc34468e23a0333978f0e23d28d0b3f0c71a151aef9c3f7680/certifi-2019.6.16.tar.gz"
sha256 "945e3ba63a0b9f577b1395204e13c3a231f9bc0223888be653286534e5873695"
end
resource "chardet" do
url "https://files.pythonhosted.org/packages/fc/bb/a5768c230f9ddb03acc9ef3f0d4a3cf93462473795d18e9535498c8f929d/chardet-3.0.4.tar.gz"
sha256 "84ab92ed1c4d4f16916e05906b6b75a6c0fb5db821cc65e70cbd64a3e2a5eaae"
end
resource "ckcc-protocol" do
url "https://files.pythonhosted.org/packages/35/9a/a49ee1591dc7d532a6905988934cbd94e44e917f2d5660b2d3d88239ec3f/ckcc-protocol-0.7.7.tar.gz"
sha256 "3c3815342354ccbf63ba7ecfa5f0c27e56a308d7157f1b02835868977659c979"
end
resource "click" do
url "https://files.pythonhosted.org/packages/f8/5c/f60e9d8a1e77005f664b76ff8aeaee5bc05d0a91798afd7f53fc998dbc47/Click-7.0.tar.gz"
sha256 "5b94b49521f6456670fdb30cd82a4eca9412788a93fa6dd6df72c94d5a8ff2d7"
end
resource "construct" do
url "https://files.pythonhosted.org/packages/19/c0/f054941fa33d14378de66d2c0477d31f7ad97aa2e298a5771a7b20bc2039/construct-2.9.45.tar.gz"
sha256 "2271a0efd0798679dea825ff47e22a4c550456a5db0ba8baa82f7eae0af0118c"
end
resource "dnspython" do
url "https://files.pythonhosted.org/packages/ec/c5/14bcd63cb6d06092a004793399ec395405edf97c2301dfdc146dfbd5beed/dnspython-1.16.0.zip"
sha256 "36c5e8e38d4369a08b6780b7f27d790a292b2b08eea01607865bf0936c558e01"
end
resource "ecdsa" do
url "https://files.pythonhosted.org/packages/51/76/139bf6e9b7b6684d5891212cdbd9e0739f2bfc03f380a1a6ffa700f392ac/ecdsa-0.13.2.tar.gz"
sha256 "5c034ffa23413ac923541ceb3ac14ec15a0d2530690413bff58c12b80e56d884"
end
resource "hidapi" do
url "https://files.pythonhosted.org/packages/c1/86/89df0e8890f96eeb5fb68d4ccb14cb38e2c2d2cfd7601ba972206acd9015/hidapi-0.7.99.post21.tar.gz"
sha256 "e0be1aa6566979266a8fc845ab0e18613f4918cf2c977fe67050f5dc7e2a9a97"
end
resource "idna" do
url "https://files.pythonhosted.org/packages/ad/13/eb56951b6f7950cadb579ca166e448ba77f9d24efc03edd7e55fa57d04b7/idna-2.8.tar.gz"
sha256 "c357b3f628cf53ae2c4c05627ecc484553142ca23264e593d327bcde5e9c3407"
end
resource "jsonrpclib-pelix" do
url "https://files.pythonhosted.org/packages/5c/4e/67c832052d6d85731732193b5d58ff9c2c3ec91087324ad5c2d814fc56c9/jsonrpclib-pelix-0.4.0.tar.gz"
sha256 "19c558e169a51480b39548783067ca55046b62b2409ab4559931255e12f635de"
end
resource "keepkey" do
url "https://files.pythonhosted.org/packages/61/f7/5487352c4a724fa864c442938b4b44244beaeec34e1d351916611441345f/keepkey-6.1.0.tar.gz"
sha256 "2e1623409307c86f709054ad191bc7707c4feeacae2e497bd933f2f0054c6eb0"
end
resource "libusb1" do
url "https://files.pythonhosted.org/packages/80/bb/4ee9d760dd29499d877ee384f1d2bc6bb9923defd4c69843aef5e729972d/libusb1-1.7.1.tar.gz"
sha256 "adf64a4f3f5c94643a1286f8153bcf4bc787c348b38934aacd7fe17fbeebc571"
end
resource "mnemonic" do
url "https://files.pythonhosted.org/packages/a4/5a/663362ccceb76035ad50fbc20203b6a4674be1fe434886b7407e79519c5e/mnemonic-0.18.tar.gz"
sha256 "02a7306a792370f4a0c106c2cf1ce5a0c84b9dbd7e71c6792fdb9ad88a727f1d"
end
resource "multidict" do
url "https://files.pythonhosted.org/packages/7f/8f/b3c8c5b062309e854ce5b726fc101195fbaa881d306ffa5c2ba19efa3af2/multidict-4.5.2.tar.gz"
sha256 "024b8129695a952ebd93373e45b5d341dbb87c17ce49637b34000093f243dd4f"
end
resource "pbkdf2" do
url "https://files.pythonhosted.org/packages/02/c0/6a2376ae81beb82eda645a091684c0b0becb86b972def7849ea9066e3d5e/pbkdf2-1.3.tar.gz"
sha256 "ac6397369f128212c43064a2b4878038dab78dab41875364554aaf2a684e6979"
end
resource "protobuf" do
url "https://files.pythonhosted.org/packages/cd/02/0425c38def9047d77166abdc9bb66dcff2882095c57b952511c85720f03c/protobuf-3.9.0.tar.gz"
sha256 "b3452bbda12b1cbe2187d416779de07b2ab4c497d83a050e43c344778763721d"
end
resource "pyaes" do
url "https://files.pythonhosted.org/packages/44/66/2c17bae31c906613795711fc78045c285048168919ace2220daa372c7d72/pyaes-1.6.1.tar.gz"
sha256 "02c1b1405c38d3c370b085fb952dd8bea3fadcee6411ad99f312cc129c536d8f"
end
resource "pyblake2" do
url "https://files.pythonhosted.org/packages/a6/ea/559658f48713567276cabe1344a9ef918adcb34a9da417dbf0a2f7477d8e/pyblake2-1.1.2.tar.gz"
sha256 "5ccc7eb02edb82fafb8adbb90746af71460fbc29aa0f822526fc976dff83e93f"
end
resource "qrcode" do
url "https://files.pythonhosted.org/packages/19/d5/6c7d4e103d94364d067636417a77a6024219c58cd6e9f428ece9b5061ef9/qrcode-6.1.tar.gz"
sha256 "505253854f607f2abf4d16092c61d4e9d511a3b4392e60bff957a68592b04369"
end
resource "requests" do
url "https://files.pythonhosted.org/packages/01/62/ddcf76d1d19885e8579acb1b1df26a852b03472c0e46d2b959a714c90608/requests-2.22.0.tar.gz"
sha256 "11e007a8a2aa0323f5a921e9e6a2d7e4e67d9877e85773fba9ba6419025cbeb4"
end
resource "safet" do
url "https://files.pythonhosted.org/packages/94/dd/31e2d333e61d80baa0d24dcb12f890d17e803f7d6f73145a4fa4c41058b4/safet-0.1.4.tar.gz"
sha256 "b152874acdc89ff0c8b2d680bfbf020b3e53527c2ad3404489dd61a548aa56a1"
end
resource "six" do
url "https://files.pythonhosted.org/packages/dd/bf/4138e7bfb757de47d1f4b6994648ec67a51efe58fa907c1e11e350cddfca/six-1.12.0.tar.gz"
sha256 "d16a0141ec1a18405cd4ce8b4613101da75da0e9a7aec5bdd4fa804d0e0eba73"
end
resource "trezor" do
url "https://files.pythonhosted.org/packages/88/9e/9b84a35c3709cda7b35a211dc9617f38740eb2d3a46a20a1087efb795302/trezor-0.11.3.tar.gz"
sha256 "c79a500e90d003073c8060d319dceb042caaba9472f13990c77ed37d04a82108"
end
resource "typing-extensions" do
url "https://files.pythonhosted.org/packages/59/b6/21774b993eec6e797fbc49e53830df823b69a3cb62f94d36dfb497a0b65a/typing_extensions-3.7.4.tar.gz"
sha256 "2ed632b30bb54fc3941c382decfd0ee4148f5c591651c9272473fea2c6397d95"
end
resource "urllib3" do
url "https://files.pythonhosted.org/packages/4c/13/2386233f7ee40aa8444b47f7463338f3cbdf00c316627558784e3f542f07/urllib3-1.25.3.tar.gz"
sha256 "dbe59173209418ae49d485b87d1681aefa36252ee85884c31346debd19463232"
end
resource "websocket_client" do
url "https://files.pythonhosted.org/packages/c5/01/8c9c7de6c46f88e70b5a3276c791a2be82ae83d8e0d0cc030525ee2866fd/websocket_client-0.56.0.tar.gz"
sha256 "1fd5520878b68b84b5748bb30e592b10d0a91529d5383f74f4964e72b297fd3a"
end
resource "yarl" do
url "https://files.pythonhosted.org/packages/fb/84/6d82f6be218c50b547aa29d0315e430cf8a23c52064c92d0a8377d7b7357/yarl-1.3.0.tar.gz"
sha256 "024ecdc12bc02b321bc66b41327f930d1c2c543fa9a561b39861da9388ba7aa9"
end
def install
# https://stackoverflow.com/a/44466013
ENV.delete("PYTHONPATH")
virtualenv_install_with_resources
end
test do
system bin/"electrum", "--help"
end
end
| 46 | 147 | 0.824821 |
e8c68cb0e4490d30f83ceab64b486334c9989b50 | 402 | require 'rails_helper'
RSpec.describe FormsController do
describe '#check_household' do
controller do
def index
head :ok
end
end
it 'redirects to the homepage if no household is set on the session' do
get :index, params: { locale: I18n.default_locale }, session: { current_household_id: nil }
expect(response).to redirect_to(root_path)
end
end
end
| 23.647059 | 97 | 0.68408 |
033db2e0fbb4160e79e9ae7252d5db77c04d5325 | 488 | module Quickbooks
module Service
class SalesReceipt < BaseService
def delete(sales_receipt)
delete_by_query_string(sales_receipt)
end
def pdf(sales_receipt)
url = "#{url_for_resource(model::REST_RESOURCE)}/#{sales_receipt.id}/pdf"
response = do_http_raw_get(url, {}, {'Accept' => 'application/pdf'})
response.plain_body
end
private
def model
Quickbooks::Model::SalesReceipt
end
end
end
end
| 21.217391 | 81 | 0.639344 |
08a65e894e26dae9ae4302915a40f92eccc2364c | 1,362 | # -*- encoding: utf-8 -*-
# stub: formatador 0.2.5 ruby lib
Gem::Specification.new do |s|
s.name = "formatador".freeze
s.version = "0.2.5"
s.required_rubygems_version = Gem::Requirement.new(">= 0".freeze) if s.respond_to? :required_rubygems_version=
s.require_paths = ["lib".freeze]
s.authors = ["geemus (Wesley Beary)".freeze]
s.date = "2014-05-23"
s.description = "STDOUT text formatting".freeze
s.email = "[email protected]".freeze
s.extra_rdoc_files = ["README.rdoc".freeze]
s.files = ["README.rdoc".freeze]
s.homepage = "http://github.com/geemus/formatador".freeze
s.rdoc_options = ["--charset=UTF-8".freeze]
s.rubyforge_project = "formatador".freeze
s.rubygems_version = "2.6.11".freeze
s.summary = "Ruby STDOUT text formatting".freeze
s.installed_by_version = "2.6.11" if s.respond_to? :installed_by_version
if s.respond_to? :specification_version then
s.specification_version = 2
if Gem::Version.new(Gem::VERSION) >= Gem::Version.new('1.2.0') then
s.add_development_dependency(%q<rake>.freeze, [">= 0"])
s.add_development_dependency(%q<shindo>.freeze, [">= 0"])
else
s.add_dependency(%q<rake>.freeze, [">= 0"])
s.add_dependency(%q<shindo>.freeze, [">= 0"])
end
else
s.add_dependency(%q<rake>.freeze, [">= 0"])
s.add_dependency(%q<shindo>.freeze, [">= 0"])
end
end
| 34.923077 | 112 | 0.668135 |
d55e81216afd3d3639bc1bdd3bfe590155691f4e | 678 | #encoding: utf-8
module PostProcessors
# Turns references to options into links.
#
# While we should do our best to link to options we are not always consistent
# with it. This processor ensure that we are.
class OptionLinker
class << self
def link!(content)
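        # Hypothetical example: a reference like `files.*` becomes [`files.*`](#files),
        # provided the document contains a "## files" section (and assuming
        # "files".slugify returns "files" unchanged).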
content.scan(/ `([a-zA-Z][a-zA-Z_.\*]*)`/).collect do |matches|
option = matches.first
section_name = option.end_with?(".*") ? option.sub(/\.\*$/, '') : option
if content.include?("## #{section_name}")
content.gsub!(/ `#{option}`/, " [`#{option}`](##{section_name.slugify})")
end
end
content
end
end
end
end
| 27.12 | 85 | 0.575221 |
1a3b405329942b38355bc93365bc27b2730e45c8 | 408 |
# Time complexity: O(n)
# Space Complexity: O(n)
# P(n) = P(P(n - 1)) + P(n - P(n - 1))
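# For example, newman_conway(8) returns "1 1 2 2 3 4 4 4".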
def newman_conway(num)
if num < 1
raise ArgumentError.new
end
return "1" if num == 1
return "1 1" if num == 2
result = [0,1,1]
for i in (3..num) do
answer = result[result[i - 1]] + result[i - result[i - 1]]
result.append(answer)
end
return result[1,result.length - 1].join(" ")
end | 18.545455 | 62 | 0.563725 |
39a95f8294ef7d2e09dd41667dbb2da4a7618035 | 9,974 | require 'date'
require 'faraday'
require 'multi_xml'
module CTA
class BusTracker
class VehiclesResponse < CTA::API::Response
# @return [Array<CTA::Bus>] An array with a full {CTA::Bus} object for each vehicle returned in the API, augmented
# with live details
attr_reader :vehicles
def initialize(parsed_body, raw_body, debug)
super(parsed_body, raw_body, debug)
@vehicles = Array.wrap(parsed_body["bustime_response"]["vehicle"]).map do |v|
bus = CTA::Bus.find_active_run(v["rt"], v["tmstmp"], (v["dly"] == "true"), v["rtdir"]).first
bus.live = CTA::Bus::Live.new(v)
bus
end
end
end
class TimeResponse < CTA::API::Response
# @return [DateTime] Current time according to the BusTime servers which power the BusTracker API
attr_reader :timestamp
def initialize(parsed_body, raw_body, debug)
super(parsed_body, raw_body, debug)
@timestamp = DateTime.parse(parsed_body["bustime_response"]["tm"])
end
end
class RoutesResponse < CTA::API::Response
# @return [Array<CTA::Route>] An array with a full {CTA::Route} object for each route returned by the API,
# augmented with the color that the API thinks you should be using (which is not always found in the GTFS data).
attr_reader :routes
def initialize(parsed_body, raw_body, debug)
super(parsed_body, raw_body, debug)
@routes = Array.wrap(parsed_body["bustime_response"]["route"]).map do |r|
rt = CTA::Route.where(:route_id => r["rt"]).first
rt.route_color = r["rtclr"]
rt
end
end
end
class DirectionsResponse < CTA::API::Response
# @return [Array<Direction>] An array of {Direction} that the requested route operates.
attr_reader :directions
def initialize(parsed_body, raw_body, debug)
super(parsed_body, raw_body, debug)
@directions = Array.wrap(parsed_body["bustime_response"]["dir"]).map { |d| Direction.new(d) }
end
end
class StopsResponse < CTA::API::Response
# @return [Array<CTA::Stop>] An array with full {CTA::Stop} objects that correspond to the stops returned from the API.
# @note Some stops returned from BusTracker are not found in GTFS data, so cta_redux creates them on the fly. These
# stops are for seasonal routes. An email has been sent to the CTA asking why they're not included in the GTFS data (they should be).
attr_reader :stops
def initialize(parsed_body, raw_body, debug)
super(parsed_body, raw_body, debug)
@stops = Array.wrap(parsed_body["bustime_response"]["stop"]).map do |s|
CTA::Stop.where(:stop_id => s["stpid"]).first || CTA::Stop.new_from_api_response(s)
end
end
end
class PatternsResponse < CTA::API::Response
# @return [Array<Pattern>] An array of {Pattern} objects for the requested query.
attr_reader :patterns
def initialize(parsed_body, raw_body, debug)
super(parsed_body, raw_body, debug)
@patterns = Array.wrap(parsed_body["bustime_response"]["ptr"]).map { |p| Pattern.new(p) }
end
end
class PredictionsResponse < CTA::API::Response
# @return [Array<CTA::Bus>] An array of {CTA::Bus} objects that correspond to the predictions requested.
attr_reader :vehicles
# @return [Array<CTA::Bus::Prediction>] An array of {CTA::Bus::Prediction} objects that correspond to the predictions requested.
# This is equivalent to calling +vehicles.map { |b| b.live.predictions }.flatten+
attr_reader :predictions
def initialize(parsed_body, raw_body, debug)
super(parsed_body, raw_body, debug)
@vehicles = Array.wrap(parsed_body["bustime_response"]["prd"]).map do |p|
bus = CTA::Bus.find_active_run(p["rt"], p["tmstmp"], (p["dly"] == "true"), p["rtdir"]).first
bus.live = CTA::Bus::Live.new(p, p)
bus
end
@predictions = @vehicles.map { |b| b.live.predictions }.flatten
end
end
class ServiceBulletinsResponse < CTA::API::Response
# @return [Array<ServiceBulletin>] An array of {ServiceBulletin} objects that correspond to the query requested.
# @note Consider using the {CTA::CustomerAlerts} methods to search for alerts, as theoretically they should have the same
# data and it is not a rate-limited API.
attr_reader :bulletins
def initialize(parsed_body, raw_body, debug)
super(parsed_body, raw_body, debug)
@bulletins = Array.wrap(parsed_body["bustime_response"]["sb"]).map { |sb| ServiceBulletin.new(sb) }
end
end
class ServiceBulletin
# @return [String] The name of the bulletin.
attr_reader :name
# @return [String] A short description of the bulletin.
attr_reader :subject
# @return [String] More details about the bulletin
attr_reader :details
# @return [String] Another short description of the bulletin
# @note This seems to usually be unset by the CTA.
attr_reader :brief
# @return [Symbol] Priority of the alert. One of +[:low, :medium, :high]+
attr_reader :priority
# @return [Array<Service>] An array of {Service} objects that encapsulate information (if any) about which routes and stops are affected by this bulletin.
attr_reader :affected_services
def initialize(sb)
@name = sb["nm"]
@subject = sb["sbj"]
@details = sb["dtl"]
@brief = sb["brf"]
@priority = sb["prty"].downcase.to_sym
@affected_services = Array.wrap(sb["srvc"]).map { |svc| Service.new(svc) }
end
end
class Service
# @return [CTA::Route] A {CTA::Route}, if any, affected by a {ServiceBulletin}
attr_reader :route
# @return [Direction] A {Direction} object for the direction, if any, affected by a {ServiceBulletin}
attr_reader :direction
# @return [CTA::Stop] A specific {CTA::Stop} object for the stop affected by a {ServiceBulletin}
attr_reader :stop
# @return [String] The name of the {CTA::Stop} affected.
# @note Usually this is equivalent to calling +stop.name+, but sometimes the CTA returns a {ServiceBulletin} with a stop name,
# but no stop id set - and the stop name may not exactly correspond to a {CTA::Stop} object in the GTFS feed.
attr_reader :stop_name
def initialize(s)
@route = CTA::Route.where(:route_id => s["rt"]).first
@direction = Direction.new(s["rtdir"]) if s["rtdir"]
if s["stpid"]
@stop = CTA::Stop.where(:stop_id => s["stpid"]).first || CTA::Stop.new_from_api_response(s)
@stop_name = @stop.name
else
@stop_name = s["stpnm"] # ugh
end
end
def predictions!
options = { :route => self.route }
        options.merge!({ :stop => self.stop.stop_id }) if self.stop
CTA::BusTracker.predictions!(options)
end
end
# @note {Pattern} objects enclose {Point} objects that describe a bus route. Conceptually it is similar to how a {CTA::Trip} contains
# many {CTA::StopTime} objects that describe the route a vehicle takes. However, it is specific to busses and contains better information
# for drawing turns and side-streets that the bus may take on its route. This bit of the API is mostly unnecessary unless you're drawing
# maps.
class Pattern
# @return [Integer] The ID of the pattern
attr_reader :id
# @return [Integer] The ID of the pattern
attr_reader :pattern_id
# @return [Integer] The total length of the pattern
attr_reader :length
# @return [Direction] A {Direction} object that describes to which direction of a route this pattern applies.
# @note This logically means that any given bus route (so long as it's not a circulator or one-way express) will have
# two associated {Pattern} objects
attr_reader :direction
# @return [Array<Point>] An array of {Point} objects that describe stops and waypoints along the {Pattern}
attr_reader :points
def initialize(p)
@id = @pattern_id = p["pid"].to_i
@length = p["ln"].to_f
@direction = Direction.new(p["rtdir"])
@points = Array.wrap(p["pt"]).map { |pnt| Point.new(pnt) }
end
end
class Point
# @return [Integer] The order that this {Point} appears along a {Pattern}
attr_reader :sequence
# @return [Float] The latitude of this {Point}
attr_reader :lat
# @return [Float] The longitude of this {Point}
attr_reader :lon
# @return [Float] The latitude of this {Point}
attr_reader :latitude
# @return [Float] The longitude of this {Point}
attr_reader :longitude
# @return [Symbol] The type of this {Point}. One of +[:stop, :waypoint]+
attr_reader :type
# @return [CTA::Stop] The {CTA::Stop} associated with this point.
attr_reader :stop
# @return [Float] The physical distance into a {Pattern} that corresponds to this {Point}
attr_reader :distance
def initialize(p)
@sequence = p["seq"].to_i
@lat = @latitude = p["lat"].to_f
@lon = @longitude = p["lon"].to_f
@type = (p["typ"] == "S" ? :stop : :waypoint)
@stop = CTA::Stop.where(:stop_id => p["stpid"]).first || CTA::Stop.new_from_api_response(p)
@distance = p["pdist"].to_f if p["pdist"]
end
def <=>(other)
self.sequence <=> other.sequence
end
end
class Direction
# @return [String] A direction for a service.
# @note The raw BusTracker API expects directions in the exact format this object returns. This is mostly an implementation detail, but
# explains a bit about why this object even exists.
attr_reader :direction
def initialize(d)
@direction = d
end
end
end
end
| 41.045267 | 160 | 0.645378 |
62bfe80ad4eba43e219014c3236308ab8f6db827 | 3,997 | require "digest/sha3"
class StatsApiThread < Thread
attr_reader :stats
CONTRACT_ADDR = "0x341e09f0Aa71Cb742537Aa4B946621E2d458a1E9"
DEFAULT_STATS = {}
def initialize(every:, on_change: nil, parity:)
@on_change = on_change
@parity = parity
@stats = DEFAULT_STATS
super(every) do |every|
loop do
begin
work
rescue => e
puts "ERROR", e, e.backtrace
ensure
sleep every
end
end
end
rescue => e
puts "ERROR", e, e.backtrace
end
private
def on_change(*val)
@on_change.call(*val) if @on_change
end
def hashrate(mining_target, spr)
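    # A uniformly random hash beats the target with probability mining_target / 2**256,
    # so 2**256 / mining_target hashes are expected per reward; dividing by the observed
    # seconds-per-reward gives an estimated network hashrate in hashes per second.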
hashrate = 2**256 / mining_target / spr
end
def work
puts "updating stats"
s = @parity.batch_call({
currentEthBlock: ["eth_blockNumber", nil],
name: "name()",
symbol: "symbol()",
decimals: "decimals()",
tokensMinted: "tokensMinted()",
ldps: "latestDifficultyPeriodStarted()",
miningTarget: "miningTarget()",
challengeNumber: "challengeNumber()",
rewardEra: "rewardEra()",
maxSupplyForEra: "maxSupplyForEra()",
circulatingSupply: "tokensMinted()",
lastRewardTo: "lastRewardTo()",
lastRewardAmount: "lastRewardAmount()",
lrebn: "lastRewardEthBlockNumber()",
epochCount: "epochCount()",
maximumTarget: "_MAXIMUM_TARGET()",
minimumTarget: "_MINIMUM_TARGET()",
bpr: "_BLOCKS_PER_READJUSTMENT()",
totalSupply: "totalSupply()"
}, contract_addr: CONTRACT_ADDR)
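    # epochCount % 1024 gives rewards minted since the last difficulty readjustment;
    # blocks elapsed since it started, times an assumed 15-second block time, gives
    # seconds-per-reward (600 as a fallback), which feeds the hashrate estimate below.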
difficulty = s[:maximumTarget] / s[:miningTarget]
rsr = s[:epochCount] % 1024
ebsldp = s[:currentEthBlock] - s[:ldps]
ssr = ebsldp * 15.0
spr = rsr > 0 ? ssr / rsr : 600
hr = hashrate(s[:miningTarget], spr)
dec_units = 10**s[:decimals]
stats = {
apiVersion: "1.01",
name: s[:name],
symbol: s[:symbol],
      contractUrl: "https://bscscan.com/address/#{CONTRACT_ADDR}",
contractAddress: CONTRACT_ADDR,
decimals: dec_units,
difficulty: difficulty,
minimumTarget: s[:minimumTarget].to_s,
maximumTarget: s[:maximumTarget].to_s,
miningTarget: s[:miningTarget].to_s,
challengeNumber: s[:challengeNumber],
rewardEra: s[:rewardEra].to_s,
maxSupplyForEra: s[:maxSupplyForEra],
blocksPerReadjustment: s[:bpr],
latestDifficultyPeriodStarted: s[:ldps],
circulatingSupply: (s[:tokensMinted] / dec_units.to_f).to_i,
totalSupply: (s[:totalSupply] / dec_units.to_f).to_f,
lastRewardTo: "0x%040x" % s[:lastRewardTo].to_i(16),
lastRewardAmount: s[:lastRewardAmount],
lastRewardEthBlockNumber: s[:lrebn],
currentEthBlock: s[:currentEthBlock],
ethBlocksSinceLastDifficultyPeriod: ebsldp,
secondsPerReward: spr,
hashrateEstimate: hr,
hashrateEstimateDescription: "%0.2f GH/s" % (hr * 1e-9),
rewardsSinceReadjustment: rsr
}
changed = stats != @stats
@stats = stats # atomic replace
on_change @stats if changed
puts "done updating stats"
end
end
if $0 == __FILE__
# for testing
require "./parity_rpc"
config = YAML::load_file("config.yml")
parity = ParityRPC.new(url: config["provider"])
sat = StatsApiThread.new(parity: parity, every: 10, on_change: Proc.new { puts sat.stats })
sleep
end
| 34.456897 | 93 | 0.534401 |
33c9f50e5aa14438c1780a0be14bf15c8188610c | 3,521 | require 'spec_helper'
require 'rack/test'
module Bosh::Director
module Api
describe Extensions::Scoping do
include Rack::Test::Methods
let(:config) do
config = Config.load_hash(SpecHelper.spec_get_director_config)
identity_provider = Support::TestIdentityProvider.new(config.get_uuid_provider)
allow(config).to receive(:identity_provider).and_return(identity_provider)
config
end
let(:app) { Support::TestController.new(config, true) }
describe 'scope' do
        context 'when authorization is provided' do
context 'as admin'
before { basic_authorize('admin', 'admin') }
context 'when scope is defined on a route' do
it 'passes it to identity provider' do
expect(get('/read').status).to eq(200)
end
end
context 'when scope is not defined on a route' do
it 'uses default scope' do
expect(get('/test_route').status).to eq(200)
end
end
context 'when scope is set for request params' do
it 'uses defined scope on specified param' do
expect(get('/params?name=test').status).to eq(200)
end
it 'uses default scope on non-specified param' do
expect(get('/params?name=other').status).to eq(200)
end
end
end
context 'when user does not have access' do
before { basic_authorize('reader', 'reader') }
it 'returns a detailed error message' do
get '/test_route'
expect(last_response.status).to eq(401)
expect(last_response.body).to include('Require one of the scopes:')
end
context 'when scope is set for request params' do
it 'uses defined scope on specified param' do
expect(get('/params?name=test').status).to eq(200)
end
it 'uses default scope on non-specified param' do
expect(get('/params?name=other').status).to eq(401)
end
end
context 'when identity provider is not UAA' do
let(:identity_provider) { Api::LocalIdentityProvider.new({}) }
it 'return generic error message' do
get '/test_route'
expect(last_response.status).to eq(401)
expect(last_response.body).to include('Require one of the scopes:')
end
end
end
context 'when authorization is not provided' do
context 'when controller does not require authorization' do
class NonsecureController < Bosh::Director::Api::Controllers::BaseController
def requires_authentication?
false
end
get '/' do
'Success'
end
end
let(:app) { NonsecureController.new(config) }
it 'succeeds' do
get '/'
expect(last_response.status).to eq(200)
expect(last_response.body).to include('Success')
end
end
context 'when controller requires authorization' do
it 'returns non-authorized' do
get '/read'
expect(last_response.status).to eq(401)
expect(last_response.body).to include("Not authorized: '/read'\n")
end
end
end
end
end
end
end
| 32.601852 | 88 | 0.559784 |
01a3a3b0fa41c0aac48b81ffaa3235aa0bd3f307 | 1,084 | require 'stringio'
module FSR
class FakeSocket
def initialize(remote_host, remote_port)
@remote_host, @remote_port = remote_host, remote_port
@input = StringIO.new('')
@buffer = []
end
def hostname
'localhost'
end
def address
'127.0.0.1'
end
def eof?
@input.eof?
end
alias closed? eof?
def close
end
def print(*args)
@buffer << args.join
end
def read(len)
@input.read(len)
end
def fake_input
@input
end
def fake_buffer
@buffer
end
end
end
require 'bacon'
Bacon.summary_at_exit
describe FSR::FakeSocket do
it 'can be initialized' do
@socket = FSR::FakeSocket.new('google.com', 80)
@socket.should.not.be.nil
end
it 'can be filled with input which is then read' do
@socket.fake_input.write('foobar')
@socket.fake_input.pos = 0
@socket.read(6).should == 'foobar'
@socket.read(1).should == nil
end
it 'can receive input' do
@socket.print('foo')
@socket.fake_buffer.should == ['foo']
end
end
| 16.179104 | 59 | 0.607934 |
4a39e96972170740ccc14ac244e21cc399baf9cf | 822 | require 'coveralls'
Coveralls.wear!
$: << "#{File.dirname(__FILE__)}/../lib"
Dir["#{File.dirname(__FILE__)}/support/**/*.rb"].sort.each {|f| require f}
require 'pry'
require 'vcr'
require 'gather_content'
require 'climate_control'
# require 'figs'
# Figs.load()
VCR.configure do |c|
c.filter_sensitive_data('api-id') { GatherContent::Api::Config.instance.username }
c.filter_sensitive_data('api-key') { GatherContent::Api::Config.instance.api_key }
c.filter_sensitive_data('gather_content_api_host') { ENV['GATHER_CONTENT_API_HOST'] }
c.default_cassette_options = { record: :new_episodes, allow_playback_repeats: true }
c.cassette_library_dir = 'spec/vcr_cassettes'
c.configure_rspec_metadata!
c.hook_into :webmock
end
def with_modified_env(options, &block)
ClimateControl.modify(options, &block)
end
| 30.444444 | 87 | 0.749392 |
397226bfd8c0270d9cfaa2ec49eb50766bd393f9 | 4,685 | require 'rails_helper'
RSpec.describe 'API Error', type: :request do
let!(:person) { Person.create! first_name: 'Test', last_name: 'Person' }
let!(:email) { person.emails.create(address: '[email protected]') }
subject { JSON.parse(response.body) }
describe 'Onsi::Errors::UnknownVersionError' do
before { get "/api/v2/people/#{person.id}/emails/#{email.id}" }
it { expect(response).to have_http_status 400 }
it { expect(subject['data']).to be_nil }
it { expect(subject['errors']).to be_a(Array) }
it { expect(subject.dig('errors', 0, 'status')).to eq '400' }
it { expect(subject.dig('errors', 0, 'code')).to eq 'invalid_version' }
end
describe 'ActiveRecord::RecordNotFound' do
before { get "/api/v1/people/#{person.id}/emails/#{email.id + 4}" }
it { expect(response).to have_http_status 404 }
it { expect(subject['data']).to be_nil }
it { expect(subject['errors']).to be_a(Array) }
it { expect(subject.dig('errors', 0, 'status')).to eq '404' }
it { expect(subject.dig('errors', 0, 'code')).to eq 'not_found' }
end
describe 'StandardError' do
before do
expect_any_instance_of(PeopleController).to receive(:index) do
raise ArgumentError, 'Passed 2, expected 4'
end
get "/api/v1/people"
end
it { expect(response).to have_http_status 500 }
it { expect(subject['data']).to be_nil }
it { expect(subject['errors']).to be_a(Array) }
it { expect(subject.dig('errors', 0, 'status')).to eq '500' }
it { expect(subject.dig('errors', 0, 'code')).to eq 'internal_server_error' }
end
describe 'ActiveRecord::RecordInvalid' do
before do
body = JSON.dump(
data: {
type: 'email',
attributes: {
address: 'foo'
}
}
)
post "/api/v1/people/#{person.id}/emails", params: body, headers: { 'Content-Type' => 'application/json' }
end
it { expect(response).to have_http_status 422 }
it { expect(subject['data']).to be_nil }
it { expect(subject['errors']).to be_a(Array) }
it { expect(subject.dig('errors', 0, 'status')).to eq '422' }
it { expect(subject.dig('errors', 0, 'code')).to eq 'validation_error' }
it { expect(subject.dig('errors', 0, 'meta')).to eq('error' => 'invalid', 'value' => 'foo', 'param' => 'address') }
end
describe 'ActionController::ParameterMissing' do
before do
body = JSON.dump(
data: {
attributes: {
address: '[email protected]'
}
}
)
post "/api/v1/people/#{person.id}/emails", params: body, headers: { 'Content-Type' => 'application/json' }
end
it { expect(response).to have_http_status 400 }
it { expect(subject['data']).to be_nil }
it { expect(subject['errors']).to be_a(Array) }
it { expect(subject.dig('errors', 0, 'status')).to eq '400' }
it { expect(subject.dig('errors', 0, 'code')).to eq 'missing_parameter' }
it { expect(subject.dig('errors', 0, 'meta')).to eq('param' => 'type') }
end
describe 'Onsi::Params::RelationshipNotFound' do
before do
body = JSON.dump(
data: {
type: 'message',
attributes: {
body: 'messages'
},
relationships: {
to: {
data: {
type: 'person',
id: '0'
}
}
}
}
)
post "/api/v1/people/#{person.id}/emails/#{email.id}/messages",
params: body,
headers: { 'Content-Type' => 'application/json' }
end
it { expect(response).to have_http_status 400 }
it { expect(subject['data']).to be_nil }
it { expect(subject['errors']).to be_a(Array) }
it { expect(subject.dig('errors', 0, 'status')).to eq '400' }
it { expect(subject.dig('errors', 0, 'code')).to eq 'missing_relationship' }
it { expect(subject.dig('errors', 0, 'meta')).to eq('param' => 'to_id') }
end
describe 'Onsi::Params::MissingReqiredAttribute' do
before do
body = JSON.dump(
data: {
type: 'email',
attributes: {
foo: 'bar'
}
}
)
post "/api/v1/people/#{person.id}/emails", params: body, headers: { 'Content-Type' => 'application/json' }
end
it { expect(response).to have_http_status 400 }
it { expect(subject['data']).to be_nil }
it { expect(subject['errors']).to be_a(Array) }
it { expect(subject.dig('errors', 0, 'status')).to eq '400' }
it { expect(subject.dig('errors', 0, 'code')).to eq 'missing_attribute' }
it { expect(subject.dig('errors', 0, 'meta')).to eq('attribute' => 'address') }
end
end
| 34.19708 | 119 | 0.576307 |
ffca3a80b4af6fbf7e726516f9b77bf19729b6bd | 728 | cask 'toggldesktop-beta' do
version '7.4.18'
sha256 'e0e6a2a44e91fb34fba9d7cb1e3a41a288ec55da9415efae18a83b396ab88287'
# github.com/toggl/toggldesktop was verified as official when first introduced to the cask
url "https://github.com/toggl/toggldesktop/releases/download/v#{version}/TogglDesktop-#{version.dots_to_underscores}.dmg"
appcast 'https://assets.toggl.com/installers/darwin_beta_appcast.xml',
checkpoint: '9aaab086611a52c3a30cd435fc87e6b28e293175b00fc6446e883b4fd9218ade'
name 'TogglDesktop'
homepage 'https://www.toggl.com/'
conflicts_with cask: [
'toggldesktop',
'toggldesktop-dev',
]
app 'TogglDesktop.app'
end
| 38.315789 | 123 | 0.707418 |
6260dd98bf22a1d55de5be1ad628e9cfdcc17d6f | 1,684 | class Cgvg < Formula
desc "Command-line source browsing tool"
homepage "https://uzix.org/cgvg.html"
url "https://uzix.org/cgvg/cgvg-1.6.3.tar.gz"
sha256 "d879f541abcc988841a8d86f0c0781ded6e70498a63c9befdd52baf4649a12f3"
license "GPL-2.0-or-later"
bottle do
rebuild 1
sha256 cellar: :any_skip_relocation, arm64_big_sur: "db2e726fa0cc8e08dc89c030ea6aa2295b07a0559d7ec25d9ee159e5a917385e"
sha256 cellar: :any_skip_relocation, big_sur: "b5d4e1695f676016451d89d502a534d4536449f77ca52091ca49fd0a83909b3c"
sha256 cellar: :any_skip_relocation, catalina: "9ba7bdb16162f2ad7cb462cef5ad939ea15f93c759e272bc9fdf8eb9b1ad8df0"
sha256 cellar: :any_skip_relocation, mojave: "9f1f8ad71fda5ecf4341a28420e5c1629a4b5285edb5d40fbe13ace1965ea239"
sha256 cellar: :any_skip_relocation, high_sierra: "05dcddf73d630ab2f67e00ea63af02f6b29b503c2e938829daa67d7f619fb556"
sha256 cellar: :any_skip_relocation, sierra: "12b8a6abb31e2e8d7ba044663b33990884ec24d1b0c0776901480cbecd47113f"
sha256 cellar: :any_skip_relocation, el_capitan: "a8232322755cb4c369193dca37fecb968ff689c6463611680e12f216f46507c4"
sha256 cellar: :any_skip_relocation, yosemite: "de0c8b890aa68670097790093fdceccfe1d69598c18ea5385069efc2f73a3c5d"
sha256 cellar: :any_skip_relocation, x86_64_linux: "db2e726fa0cc8e08dc89c030ea6aa2295b07a0559d7ec25d9ee159e5a917385e"
end
def install
system "./configure", "--prefix=#{prefix}",
"--mandir=#{man}"
system "make", "install"
end
test do
(testpath/"test").write "Homebrew"
assert_match "1 Homebrew", shell_output("#{bin}/cg Homebrew '#{testpath}/test'")
end
end
| 52.625 | 122 | 0.779691 |
e2b23e183b428c157dfb72583763a28784977233 | 12,724 | require 'sinatra'
require 'zip'
config_options = JSON.parse(File.read('./config.json'))
######
# Admin Interfaces
######
get '/admin/' do
redirect to("/no_access") if not is_administrator?
@admin = true
haml :admin, :encode_html => true
end
get '/admin/add_user' do
redirect to("/no_access") if not is_administrator?
@admin = true
haml :add_user, :encode_html => true
end
# serve a copy of the code
get '/admin/pull' do
redirect to("/no_access") if not is_administrator?
if File.exists?("./export.zip")
send_file "./export.zip", :filename => "export.zip", :type => 'Application/octet-stream'
else
"No copy of the code available. Run scripts/make_export.sh."
end
end
#create DB backup
get '/admin/dbbackup' do
redirect to("/no_access") if not is_administrator?
bdate = Time.now()
filename = "./tmp/master" + "-" + (bdate.strftime("%Y%m%d%H%M%S") +".bak")
FileUtils::copy_file("./db/master.db", filename)
if not File.zero?(filename)
send_file filename, :filename => "#{filename}", :type => 'Application/octet-stream'
else
"No copy of the database is available. Please try again."
sleep(5)
redirect to("/admin/")
end
end
#create backup of all attachments
get '/admin/attacments_backup' do
bdate = Time.now()
zip_file = "./tmp/Attachments" + "-" + (bdate.strftime("%Y%m%d%H%M%S") +".zip")
Zip::File.open(zip_file, Zip::File::CREATE) do |zipfile|
Dir["./attachments/*" ].each do | name|
zipfile.add(name.split("/").last,name)
end
end
send_file zip_file, :type => 'zip', :filename => zip_file
#File.delete(rand_zip) should the temp file be deleted?
end
# Create a new user
post '/admin/add_user' do
redirect to("/no_access") if not is_administrator?
user = User.first(:username => params[:username])
if user
if params[:password] and params[:password].size > 1
# we have to hardcode the input params to prevent param pollution
user.update(:type => params[:type], :auth_type => params[:auth_type], :password => params[:password])
else
# we have to hardcode the params to prevent param pollution
user.update(:type => params[:type], :auth_type => params[:auth_type])
end
else
user = User.new
user.username = params[:username]
user.password = params[:password]
user.type = params[:type]
user.auth_type = params[:auth_type]
user.save
end
redirect to('/admin/list_user')
end
get '/admin/list_user' do
redirect to("/no_access") if not is_administrator?
@admin = true
@users = User.all
haml :list_user, :encode_html => true
end
get '/admin/edit_user/:id' do
redirect to("/no_access") if not is_administrator?
@user = User.first(:id => params[:id])
haml :add_user, :encode_html => true
end
get '/admin/delete/:id' do
redirect to("/no_access") if not is_administrator?
@user = User.first(:id => params[:id])
@user.destroy if @user
redirect to('/admin/list_user')
end
get '/admin/add_user/:id' do
if not is_administrator?
id = params[:id]
unless get_report(id)
redirect to("/no_access")
end
end
@users = User.all(:order => [:username.asc])
@report = Reports.first(:id => params[:id])
if is_administrator?
@admin = true
end
haml :add_user_report, :encode_html => true
end
post '/admin/add_user/:id' do
if not is_administrator?
id = params[:id]
unless get_report(id)
redirect to("/no_access")
end
end
report = Reports.first(:id => params[:id])
if report == nil
return "No Such Report"
end
authors = report.authors
if authors
authors = authors.push(params[:author])
else
authors = ["#{params[:author]}"]
end
report.authors = authors
report.save
redirect to("/reports/list")
end
get '/admin/del_user_report/:id/:author' do
if not is_administrator?
id = params[:id]
unless get_report(id)
redirect to("/no_access")
end
end
report = Reports.first(:id => params[:id])
if report == nil
return "No Such Report"
end
authors = report.authors
if authors
authors = authors - ["#{params[:author]}"]
end
report.authors = authors
report.save
redirect to("/reports/list")
end
get '/admin/config' do
redirect to("/no_access") if not is_administrator?
@config = config_options
if config_options["cvss"]
@scoring = "cvss"
elsif config_options["dread"]
@scoring = "dread"
else
@scoring = "default"
end
haml :config, :encode_html => true
end
post '/admin/config' do
redirect to("/no_access") if not is_administrator?
ft = params["finding_types"].split(",")
udv = params["user_defined_variables"].split(",")
config_options["finding_types"] = ft
config_options["user_defined_variables"] = udv
config_options["port"] = params["port"]
config_options["use_ssl"] = params["use_ssl"] ? true : false
config_options["bind_address"] = params["bind_address"]
config_options["ldap"] = params["ldap"] ? true : false
config_options["ldap_domain"] = params["ldap_domain"]
config_options["ldap_dc"] = params["ldap_dc"]
config_options["burpmap"] = params["burpmap"] ? true : false
config_options["nessusmap"] = params["nessusmap"] ? true : false
config_options["vulnmap"] = params["vulnmap"] ? true : false
config_options["logo"] = params["logo"]
config_options["auto_import"] = params["auto_import"] ? true : false
config_options["chart"] = params["chart"] ? true : false
config_options["threshold"] = params["threshold"]
config_options["show_exceptions"] = params["show_exceptions"] ? true : false
if params["risk_scoring"] == "CVSS"
config_options["dread"] = false
config_options["cvss"] = true
elsif params["risk_scoring"] == "DREAD"
config_options["dread"] = true
config_options["cvss"] = false
else
config_options["dread"] = false
config_options["cvss"] = false
end
File.open("./config.json","w") do |f|
f.write(JSON.pretty_generate(config_options))
end
redirect to("/admin/config")
end
# get plugins available
get '/admin/plugins' do
redirect to("/no_access") if not is_administrator?
@plugins = []
Dir[File.join(File.dirname(__FILE__), "../plugins/**/", "*.json")].each { |lib|
@plugins.push(JSON.parse(File.open(lib).read))
}
haml :plugins, :encode_html => true
end
# enable plugins
post '/admin/plugins' do
redirect to("/no_access") if not is_administrator?
@plugins = []
Dir[File.join(File.dirname(__FILE__), "../plugins/**/", "*.json")].each { |lib|
@plugins.push(JSON.parse(File.open(lib).read))
}
@plugins.each do |plug|
p params
if params[plug["name"]]
plug["enabled"] = true
File.open("./plugins/#{plug['name']}/plugin.json","w") do |f|
f.write(JSON.pretty_generate(plug))
end
else
plug["enabled"] = false
File.open("./plugins/#{plug['name']}/plugin.json","w") do |f|
f.write(JSON.pretty_generate(plug))
end
end
end
redirect to("/admin/plugins")
end
# Manage Templated Reports
get '/admin/templates' do
redirect to("/no_access") if not is_administrator?
@admin = true
# Query for all Findings
@templates = Xslt.all(:order => [:report_type.asc])
haml :template_list, :encode_html => true
end
# Manage Templated Reports
get '/admin/templates/add' do
redirect to("/no_access") if not is_administrator?
@admin = true
haml :add_template, :encode_html => true
end
# Manage Templated Reports
get '/admin/templates/:id/download' do
redirect to("/no_access") if not is_administrator?
@admin = true
xslt = Xslt.first(:id => params[:id])
send_file xslt.docx_location, :type => 'docx', :filename => "#{xslt.report_type}.docx"
end
get '/admin/delete/templates/:id' do
redirect to("/no_access") if not is_administrator?
@xslt = Xslt.first(:id => params[:id])
if @xslt
@xslt.destroy
File.delete(@xslt.xslt_location)
File.delete(@xslt.docx_location)
end
redirect to('/admin/templates')
end
# Manage Templated Reports
post '/admin/templates/add' do
redirect to("/no_access") if not is_administrator?
@admin = true
xslt_file = "./templates/#{rand(36**36).to_s(36)}.xslt"
redirect to("/admin/templates/add") unless params[:file]
# reject if the file is above a certain limit
if params[:file][:tempfile].size > 100000000
return "File too large. 10MB limit"
end
docx = "./templates/#{rand(36**36).to_s(36)}.docx"
File.open(docx, 'wb') {|f| f.write(params[:file][:tempfile].read) }
error = false
detail = ""
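  # generate_xslt converts the uploaded .docx report template into the XSLT used for
  # report rendering; a ReportingError here means the template could not be parsed.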
begin
xslt = generate_xslt(docx)
rescue ReportingError => detail
error = true
end
if error
"The report template you uploaded threw an error when parsing:<p><p> #{detail.errorString}"
else
# open up a file handle and write the attachment
File.open(xslt_file, 'wb') {|f| f.write(xslt) }
# delete the file data from the attachment
datax = Hash.new
# to prevent traversal we hardcode this
datax["docx_location"] = "#{docx}"
datax["xslt_location"] = "#{xslt_file}"
datax["description"] = params[:description]
datax["report_type"] = params[:report_type]
data = url_escape_hash(datax)
data["finding_template"] = params[:finding_template] ? true : false
data["status_template"] = params[:status_template] ? true : false
@current = Xslt.first(:report_type => data["report_type"])
if @current
@current.update(:xslt_location => data["xslt_location"], :docx_location => data["docx_location"], :description => data["description"])
else
@template = Xslt.new(data)
@template.save
end
redirect to("/admin/templates")
haml :add_template, :encode_html => true
end
end
# Manage Templated Reports
get '/admin/templates/:id/edit' do
redirect to("/no_access") if not is_administrator?
  @admin = true
@template = Xslt.first(:id => params[:id])
haml :edit_template, :encode_html => true
end
# Manage Templated Reports
post '/admin/templates/edit' do
redirect to("/no_access") if not is_administrator?
@admin = true
template = Xslt.first(:id => params[:id])
xslt_file = template.xslt_location
redirect to("/admin/templates/#{params[:id]}/edit") unless params[:file]
# reject if the file is above a certain limit
if params[:file][:tempfile].size > 100000000
return "File too large. 10MB limit"
end
docx = "./templates/#{rand(36**36).to_s(36)}.docx"
File.open(docx, 'wb') {|f| f.write(params[:file][:tempfile].read) }
error = false
detail = ""
begin
xslt = generate_xslt(docx)
rescue ReportingError => detail
error = true
end
if error
"The report template you uploaded threw an error when parsing:<p><p> #{detail.errorString}"
else
# open up a file handle and write the attachment
File.open(xslt_file, 'wb') {|f| f.write(xslt) }
# delete the file data from the attachment
datax = Hash.new
# to prevent traversal we hardcode this
datax["docx_location"] = "#{docx}"
datax["xslt_location"] = "#{xslt_file}"
datax["description"] = params[:description]
datax["report_type"] = params[:report_type]
data = url_escape_hash(datax)
data["finding_template"] = params[:finding_template] ? true : false
data["status_template"] = params[:status_template] ? true : false
@current = Xslt.first(:report_type => data["report_type"])
if @current
@current.update(:xslt_location => data["xslt_location"], :docx_location => data["docx_location"], :description => data["description"])
else
@template = Xslt.new(data)
@template.save
end
redirect to("/admin/templates")
end
end
# get enabled plugins
get '/admin/admin_plugins' do
@menu = []
Dir[File.join(File.dirname(__FILE__), "../plugins/**/", "*.json")].each { |lib|
pl = JSON.parse(File.open(lib).read)
a = {}
if pl["enabled"] and pl["admin_view"]
# add the plugin to the menu
a["name"] = pl["name"]
a["description"] = pl["description"]
a["link"] = pl["link"]
@menu.push(a)
end
}
haml :enabled_plugins, :encode_html => true
end
| 26.900634 | 140 | 0.627161 |
ab70d65f50a62a315cc3ee849686ec1b17ef5e42 | 10,546 | # -*- coding: BINARY -*-
here = File.expand_path('../lib', __dir__)
$:.unshift(here) unless $:.include?(here)
require 'z80'
require 'zxlib/gfx/draw'
require 'zxlib/basic'
class Stars
include Z80
include Z80::TAP
import ZXLib::Sys, macros: true, code: false, labels: true
macro_import Stdlib
macro_import MathInt
macro_import ZXLib::Gfx::Draw
BG_STARS = 256
NSTARS = 64
SPEED = 3
MAX_SPEED = 32
STAR_COORD_HI_MASK = 0b00111111
class Star < Label
x word
y word
z byte
mask byte
saddr word
end
with_saved :start, :exx, hl, ret: true do |eoc|
call make_stars
release_key key_pressed?
jr NZ, release_key
ld a, 0b01000111
call clear_screen
call background_stars
mloop call move_stars
key_pressed?(0xf7) # [1]-[5]
rra # [1]
jr C, inc_speed
rra # [2]
jr C, dec_speed
jr NZ, quit
key_pressed?(0x08)
jr Z, mloop
# clear screen using CL-ALL and reset border
quit call rom.cl_all
ld a, [vars.bordcr]
call set_border_cr
jr eoc
dec_speed ld a, [move_stars.speed_p + 1]
anda a
jr Z, mloop
dec a
jr set_speed
inc_speed ld a, [move_stars.speed_p + 1]
inc a
cp MAX_SPEED
jr NC, mloop
set_speed ld [move_stars.speed_p + 1], a
jr mloop
end
macro :divmod16_8 do |eoc, th, tl, m| # divide (tt)/m
divmod th, m, check0:eoc, check1:eoc
divmod tl, m, clrrem:false
anda a # clear CF
end
macro :persp_coord do |_, th, tl| # z: c, tt: x -> hl: x/z, CF:div by 0, a': sgn(x-128)
rlc tl # we just need one random bit for a sign
ex af, af # CF: sgn(x)
divmod16_8 th, tl, c # (x*4)/z
end
ns :move_stars do
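    # For every star: erase its previous pixel, perspective-project (x/z, y/z) onto a
    # screen centered at (128, 96), plot it again, then reduce z by the current speed.
    # A star whose z underflows past zero is re-seeded at a random x/y position.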
ld [restore_sp + 1], sp
di
ld sp, stars
nstars_p ld b, NSTARS
sloop exx
pop hl # x
pop de # y
pop bc # z
ld a, b # mask
exx
pop hl # screen address
xor [hl]
ld [hl], a # clear star
exx
persp_coord h, l # hl: x/z
jr C, skip_star # z=0
ld a, h
anda a
jr NZ, skip_star # hl >= 256
ex af, af
jr C, x_negative
ld a, l # x >= 0
add 128 # x to screen coordinates
jr NC, skip_neg_x # xp < 256
# xp >= 256
skip_star ld a, c # c: z
ex af, af
xor a
ld d, a
ld h, a
ld l, a
jp skip_plot
x_negative ld a, 128 # x < 0
sub l # x to screen coordinates
jr C, skip_star # xp < 0
skip_neg_x ld l, a # l: xp
ex de, hl # e: xp, hl: y
persp_coord h, l # hl: y/z
jr C, skip_star # z=0
ld a, h
anda a
jr NZ, skip_star # hl >= 256
ex af, af
jr C, y_negative
ld a, l # y >= 0
add 96 # y to screen coordinates
jr NC, skip_neg_y # yp < 256
jr skip_star # yp >= 256
y_negative ld a, 96 # y < 0
sub l # y to screen coordinates
jr C, skip_star # yp < 0
skip_neg_y cp 192
jr NC, skip_star # yp >= 192
ld h, a # yp
ld l, e # xp
ld a, c # a: z
ex af, af # a': z
plot plot_pixel(l, h, preshifted_pixel, fx: :skip, scraddr:0x4000)
ld d, a # pixel
xor [hl]
ld [hl], a
skip_plot push hl # screen addr
ex af, af # a: z
speed_p sub SPEED
ld e, a # z
push de
jr C, reinitialize_star
4.times { inc sp }
back_to_loop exx
dec b
jp NZ, sloop
restore_sp ld sp, 0
ei
ret
reinitialize_star ld a, 0b00010000
out (254), a
call rand_seed
ld a, h
anda STAR_COORD_HI_MASK
ld d, a
ld e, l
push de # y
call rand_fn_hl
ld [seed], hl
ld a, h
anda STAR_COORD_HI_MASK
ld d, a
ld e, l
push de # x
ld hl, +stars
add hl, sp
ld sp, hl
xor a
out (254), a
jr back_to_loop
end
# clears screen area with border and attributes set according to register a
clear_screen clrmem mem.attrs, mem.attrlen, a
set_border_cr anda 0b00111000
3.times { rrca }
out (254), a
call clearscr
ret
# clear pixel screen
clearscr clrmem mem.screen, mem.scrlen, 0
ret
ns :make_stars do
ld [restore_sp + 1], sp
di
ld sp, stars_end
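    # Records are built by pushing fields in reverse order with SP starting at
    # stars_end, so each 8-byte entry matches the Star label layout:
    # x (word), y (word), z/mask (word), screen address (word).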
ld hl, [vars.seed]
exx
ld b, 256 # 256 stars max
ld de, 0
mloop push de # screen addr
exx
call rand_fn_hl
ld e, l
ld d, 0
push de # mask|z
call rand_fn_hl
ld e, l
ld a, h
anda STAR_COORD_HI_MASK
ld d, a
push de # y
call rand_fn_hl
ld e, l
ld a, h
anda STAR_COORD_HI_MASK
ld d, a
push de # x
exx
djnz mloop
exx
ld [seed], hl
restore_sp ld sp, 0
ei
ret
end
ns :background_stars do
bstars_p ld b, BG_STARS
ld ixl, 0b00000001
exx
call rand_seed
jr skip
bloop exx
next_plot call rand_fn_hl
skip ld a, h
cp 192
jr NC, change_color
push hl
plot_pixel(l, h, preshifted_pixel, fx: :or, with_attributes: :overwrite, color_attr: ixl, scraddr:0x4000)
pop hl
exx
djnz bloop
exx
ld [seed], hl
ret
change_color ld a, h
xor l
anda 0b01000011
jr NZ, set_color
ld a, 0b00000001
set_color ld ixl, a
jr next_plot
end
rand_seed ld hl, 0
rand_fn_hl rnd
ret
seed union rand_seed+1, 2
nstars union move_stars.nstars_p + 1, 1
bstars union background_stars.bstars_p + 1, 1
preshifted_pixel preshifted_pixel_mask_data :pixel
stars label Star
stars_end union stars[256], Star
end
include ZXLib
stars = Stars.new 0xf000
program = Basic.parse_source <<-EOC
10 RANDOMIZE USR #{stars[:start]}
INPUT "How many background stars? "; bstars: POKE #{stars[:bstars]}, bstars
INPUT "How many moving stars? "; nstars: POKE #{stars[:nstars]}, nstars
GO TO 10
9998 STOP
9999 CLEAR #{stars.org-1}: LOAD ""CODE: RUN
EOC
puts stars.debug
puts "start: #{stars[:start]}"
puts "nstars: #{stars[:nstars]}"
puts "bstars: #{stars[:bstars]}"
puts "stars data: #{stars[:stars]}"
puts "stars end: #{stars[:stars_end]}"
puts program.to_source escape_keywords: true
program.save_tap "examples/stars", line: 9999
stars.save_tap "examples/stars", append: true
puts "TAP: stars.tap:"
Z80::TAP.parse_file('examples/stars.tap') do |hb|
puts hb.to_s
end
| 34.24026 | 127 | 0.351318 |
0342ef661050391e7ec85b74924c7f8088204e9c | 766 | # frozen_string_literal: true
require 'bundler/setup'
require 'polyphony'
require 'polyphony/fs'
PATH = __FILE__ # file read repeatedly by the benchmarks below (was previously undefined)

def raw_read_file(x)
t0 = Time.now
x.times { IO.orig_read(__FILE__) }
puts "raw_read_file: #{Time.now - t0}"
end
def threaded_read_file(x, y)
t0 = Time.now
threads = []
y.times do
threads << Thread.new { x.times { IO.orig_read(PATH) } }
end
threads.each(&:join)
puts "threaded_read_file: #{Time.now - t0}"
end
def thread_pool_read_file(x, y)
t0 = Time.now
supervise do |s|
y.times do
s.spin { x.times { IO.read(PATH) } }
end
end
puts "thread_pool_read_file: #{Time.now - t0}"
end
Y = ARGV[0] ? ARGV[0].to_i : 10
X = ARGV[1] ? ARGV[1].to_i : 100
raw_read_file(X * Y)
threaded_read_file(X, Y)
thread_pool_read_file(X, Y)
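
# Illustrative invocation (hypothetical file name): ARGV[0] sets the number of
# threads/fibers (Y, default 10) and ARGV[1] the reads per worker (X, default 100):
#
#   ruby io_read_bench.rb 20 500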
| 19.641026 | 60 | 0.660574 |
e9748f997a5c11a62ac5509f992fc21189b2fa88 | 115 | module AuthHelper
def http_login
basic_authorize(CONFIG["auth_username"], CONFIG["auth_password"])
end
end
| 19.166667 | 69 | 0.765217 |
e254b60dbf1873baeb5e8b6cf1b3dffd603ef6cd | 621 | # We are eval'd in the scope of the acceptance framework's option-parsing
# code, so we can't use __FILE__ to find our location. We have access to
# a variable 'options_file_path', though.
require File.expand_path(File.join(File.dirname(options_file_path), 'common.rb'))
common_options_hash.tap do |my_hash|
my_hash[:puppetdb_database] = 'embedded'
if ENV['BEAKER_TYPE'] == 'aio'
my_hash[:is_puppetserver] = 'true'
my_hash[:'use-service'] = 'true'
my_hash[:'puppetserver-confdir'] = '/etc/puppetlabs/puppetserver/conf.d'
my_hash[:puppetservice] = 'puppetserver'
end
end
| 41.4 | 81 | 0.697262 |
1a31ad2108ac9e06541185eab145336fef69a2e0 | 464 | # frozen_string_literal: true
module ElasticQueryStringQuery
def filtered_query_query(json)
return if @q.blank?
json.must do
json.child! { query_string(json, highlighted_fields, @q, query_string_options) }
end
end
def query_string(json, fields, query, options = {})
json.query_string do
json.fields fields
json.query query
options.each do |option, value|
json.set! option, value
end
end
end
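
# Illustrative shape of the emitted clause (assuming `json` is a Jbuilder-style
# builder): for fields %w(title description), query "rockets" and
# options { analyze_wildcard: true }, this builds roughly:
#
#   { "query_string" => { "fields" => ["title", "description"],
#                         "query" => "rockets",
#                         "analyze_wildcard" => true } }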
end
| 21.090909 | 86 | 0.676724 |
1cc7d15a3a47db00f9e5be804896df793a8de1fd | 1,612 | # Frozen-string-literal: true
# Copyright: 2012 - 2018 - MIT License
# Author: Jordon Bedwell
# Encoding: utf-8
module Jekyll
module Assets
module Plugins
class Liquid
TYPES = {
"text/liquid+sass" => %w(.sass.liquid .liquid.sass),
"application/liquid+javascript" => %w(.liquid.js .js.liquid),
"application/liquid+ecmascript-6" => %w(.liquid.es6 .es6.liquid),
"text/liquid+coffeescript" => %w(.liquid.coffee .coffee.liquid),
"text/liquid+scss" => %w(.liquid.scss .scss.liquid),
"text/liquid+css" => %w(.liquid.css .css.liquid),
"image/liquid+svg+xml" => %w(.liquid.svg .svg.liquid),
}.freeze
def self.call(ctx)
env = ctx[:environment]
registers = { site: env.jekyll }
environment = env.jekyll.to_liquid.merge(jekyll: {
"version" => Jekyll::VERSION, "environment" => Jekyll.env
})
bctx = ::Liquid::Context.new(environment, {}, registers)
ctx[:data] = env.parse_liquid(ctx[:data], {
ctx: bctx,
})
end
end
# --
# Registers it inside of Sprockets.
# Because we need to keep some support for 3.x we register it
# two different ways depending on the type of Sprockets.
# --
Liquid::TYPES.each do |k, v|
to = Utils.strip_secondary_content_type(k)
charset = Sprockets.mime_types[to][:charset]
Sprockets.register_mime_type(k, extensions: v, charset: charset)
Sprockets.register_transformer(k, to, Liquid)
end
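# Illustrative example based on the TYPES table above: an asset named
# "app.js.liquid" matches "application/liquid+javascript", is run through this
# Liquid transformer, and is then handed to the rest of the pipeline under the
# stripped type, "application/javascript".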
end
end
end
| 33.583333 | 75 | 0.585608 |
ab7639a4f6f6645a39d62f3d46547f4648143337 | 10,886 | require_relative '../../helpers/avatar_helper'
require_dependency 'carto/controller_helper'
module Carto
module Api
class UsersController < ::Api::ApplicationController
include OrganizationUsersHelper
include AppAssetsHelper
include MapsApiHelper
include SqlApiHelper
include CartoDB::ConfigUtils
include FrontendConfigHelper
include AccountTypeHelper
include AvatarHelper
begin
include OnpremisesLicensingGear::ApplicationHelper
rescue NameError
end
UPDATE_ME_FIELDS = %i(
name last_name website description location twitter_username disqus_shortname available_for_hire company
industry phone job_role company_employees use_case
).freeze
PASSWORD_DOES_NOT_MATCH_MESSAGE = 'Password does not match'.freeze
ssl_required
before_action :optional_api_authorization, only: [:me]
before_action :recalculate_user_db_size, only: [:me]
skip_before_action :api_authorization_required, only: [:me, :get_authenticated_users]
skip_before_action :check_user_state, only: [:me, :delete_me]
rescue_from StandardError, with: :rescue_from_standard_error
def show
render json: Carto::Api::UserPresenter.new(uri_user).data
end
def me
carto_viewer = current_viewer.present? ? Carto::User.find(current_viewer.id) : nil
config = {
user_frontend_version: CartoDB::Application.frontend_version
}
if carto_viewer.present?
cant_be_deleted_reason = carto_viewer.cant_be_deleted_reason
config = {
user_data: Carto::Api::UserPresenter.new(carto_viewer).data,
default_fallback_basemap: carto_viewer.default_basemap,
dashboard_notifications: carto_viewer.notifications_for_category(:dashboard),
organization_notifications: organization_notifications(carto_viewer),
unfiltered_organization_notifications: unfiltered_organization_notifications(carto_viewer),
is_just_logged_in: !!flash['logged'],
is_first_time_viewing_dashboard: !carto_viewer.dashboard_viewed_at,
can_change_email: carto_viewer.can_change_email?,
auth_username_password_enabled: carto_viewer.organization.try(:auth_username_password_enabled),
can_change_password: carto_viewer.can_change_password?,
plan_name: plan_name(carto_viewer.account_type),
plan_url: carto_viewer.plan_url(request.protocol),
can_be_deleted: cant_be_deleted_reason.nil?,
cant_be_deleted_reason: cant_be_deleted_reason,
services: carto_viewer.get_oauth_services,
user_frontend_version: carto_viewer.relevant_frontend_version,
asset_host: carto_viewer.asset_host,
google_sign_in: carto_viewer.google_sign_in,
mfa_required: multifactor_authentication_required?,
license_expiration: license_expiration
}
end
config[:config] = frontend_config_hash(current_viewer)
render json: config
end
def update_me
user = current_viewer
attributes = params[:user]
if attributes.present?
unless user.valid_password_confirmation(attributes[:password_confirmation])
raise Carto::PasswordConfirmationError.new
end
update_user_attributes(user, attributes)
raise Sequel::ValidationFailed.new('Validation failed') unless user.errors.try(:empty?) && user.valid?
ActiveRecord::Base.transaction do
update_user_multifactor_authentication(user, attributes[:mfa])
user.update_in_central
user.save(raise_on_failure: true)
end
end
render_jsonp(Carto::Api::UserPresenter.new(user, current_viewer: current_viewer).to_poro)
rescue CartoDB::CentralCommunicationFailure => e
log_error(exception: e, target_user: user, params: params)
render_jsonp({ errors: "There was a problem while updating your data. Please, try again." }, 422)
rescue Sequel::ValidationFailed, ActiveRecord::RecordInvalid
render_jsonp({ message: "Error updating your account details", errors: user.errors }, 400)
rescue Carto::PasswordConfirmationError
render_jsonp({ message: "Error updating your account details", errors: user.errors }, 403)
end
def delete_me
user = current_viewer
deletion_password_confirmation = params[:deletion_password_confirmation]
if user.needs_password_confirmation? && !user.validate_old_password(deletion_password_confirmation)
render_jsonp({ message: "Error deleting user: #{PASSWORD_DOES_NOT_MATCH_MESSAGE}" }, 400) and return
end
user.destroy_account
render_jsonp({ logout_url: logout_url }, 200)
rescue CartoDB::CentralCommunicationFailure => e
log_error(exception: e, message: 'Central error deleting user at CartoDB', target_user: @user)
render_jsonp({ errors: "Error deleting user: #{e.user_message}" }, 422)
rescue StandardError => e
CartoDB.notify_exception(e, user: user.inspect)
render_jsonp({ message: "Error deleting user: #{e.message}", errors: user.errors }, 400)
end
def get_authenticated_users
referer = request.env["HTTP_ORIGIN"].blank? ? request.env["HTTP_REFERER"] : %[#{request.env['HTTP_X_FORWARDED_PROTO']}://#{request.env["HTTP_HOST"]}]
referer_match = /https?:\/\/([\w\-\.]+)(:[\d]+)?(\/((u|user)\/([\w\-\.]+)))?/.match(referer)
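# Illustration (hypothetical referers): for "https://acme.carto.com/u/alice",
# referer_match[1] is "acme.carto.com" and referer_match[6] is "alice"; for a
# non-organization referer such as "https://alice.carto.com/dashboard",
# referer_match[6] is nil.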
if referer_match.nil?
render json: { error: "Referer #{referer} does not match" }, status: 400 and return
end
if session_user.nil?
render json: {
urls: [],
username: nil,
avatar_url: nil
} and return
end
subdomain = referer_match[1].gsub(CartoDB.session_domain, '').downcase
# referer_match[6] is the username
referer_organization_username = referer_match[6]
render_auth_users_data(session_user, referer, subdomain, referer_organization_username)
end
private
def unfiltered_organization_notifications(carto_viewer)
carto_viewer.received_notifications.order('received_at DESC').limit(10).map do |n|
Carto::Api::ReceivedNotificationPresenter.new(n).to_hash
end
end
def organization_notifications(carto_viewer)
carto_viewer.received_notifications.unread.map { |n| Carto::Api::ReceivedNotificationPresenter.new(n).to_hash }
end
def render_auth_users_data(user, referrer, subdomain, referrer_organization_username=nil)
organization_name = nil
# It doesn't have an organization username component. We assume it's not an organization referer
if referrer_organization_username.nil?
# The user is authenticated but seeing another user's dashboard
if user.username != subdomain
organization_name = CartoDB::UserOrganization.user_belongs_to_organization?(user.username)
end
else
referrer_organization_username = referrer_organization_username.downcase
# The user is seeing its own organization dashboard
if user.username == referrer_organization_username
organization_name = subdomain
# The user is seeing an organization dashboard, but not their own
else
# Authenticated with a user of the organization
if user.organization && user.organization.name == subdomain
organization_name = subdomain
# The user is authenticated with a user not belonging to the requested organization dashboard
# Let's get the first user in the session
else
organization_name = CartoDB::UserOrganization.user_belongs_to_organization?(user.username)
end
end
end
render json: {
urls: ["#{CartoDB.base_url(user.username, organization_name)}#{CartoDB.path(self, 'dashboard_bis')}"],
username: user.username,
name: user.name,
last_name: user.last_name,
avatar_url: user.avatar_url,
email: user.email,
organization: Carto::Api::OrganizationPresenter.new(user.organization).to_poro,
base_url: user.public_url
}
end
# TODO: this should be moved upwards in the controller hierarchy, and make it a replacement for current_user
# URI present-user if has valid session, or nil
def uri_user
@uri_user ||= (current_user.nil? ? nil : Carto::User.where(id: current_user.id).first)
end
# TODO: this should be moved upwards in the controller hierarchy, and make it a replacement for current_viewer
# 1st user that has valid session, if coincides with URI then same as uri_user
def session_user
@session_user ||= (current_viewer.nil? ? nil : Carto::User.where(id: current_viewer.id).first)
end
def update_user_attributes(user, attributes)
update_password_if_needed(user, attributes)
if user.can_change_email? && attributes[:email].present?
user.set_fields(attributes, [:email])
end
if attributes[:avatar_url].present? && valid_avatar_file?(attributes[:avatar_url])
user.set_fields(attributes, [:avatar_url])
end
fields_to_be_updated = UPDATE_ME_FIELDS.select { |field| attributes.has_key?(field) }
user.set_fields(attributes, fields_to_be_updated) if fields_to_be_updated.present?
end
def update_password_if_needed(user, attributes)
if password_change?(user, attributes)
user.change_password(
attributes[:password_confirmation],
attributes[:new_password],
attributes[:confirm_password]
)
update_session_security_token(user)
end
end
def password_change?(user, attributes)
(attributes[:new_password].present? || attributes[:confirm_password].present?) && user.can_change_password?
end
def recalculate_user_db_size
current_user && Carto::UserDbSizeCache.new.update_if_old(current_user)
end
def update_user_multifactor_authentication(user, mfa_enabled)
return if mfa_enabled.nil?
service = Carto::UserMultifactorAuthUpdateService.new(user_id: user.id)
service.update(enabled: mfa_enabled)
warden.session(user.username)[:multifactor_authentication_performed] = false unless mfa_enabled
end
def license_expiration
return nil unless cartodb_com_hosted?
send(:license_expiration_date) if respond_to?(:license_expiration_date)
end
end
end
end
| 41.234848 | 157 | 0.681701 |
ab00a92d881ca9d31663f188d76862d86730d27d | 988 | require 'spec_helper'
describe Confetti::Config::Author do
before do
@author = Confetti::Config::Author.new
end
it "should have a readable and writable name field" do
lambda { @author.name = "Andrew Lunny" }.should_not raise_error
@author.name.should == "Andrew Lunny"
end
it "should have a readable and writable href field" do
lambda {
@author.href = "http://alunny.github.com"
}.should_not raise_error
@author.href.should == "http://alunny.github.com"
end
it "should have a readable and writable email field" do
lambda { @author.email = "[email protected]" }.should_not raise_error
@author.email.should == "[email protected]"
end
it "should define a defined_attrs method" do
author = Confetti::Config::Author.new(
"hardeep", "127.0.0.1", "[email protected]")
author.defined_attrs.should == {
"name" => "hardeep",
"href" => "127.0.0.1",
"email" => "[email protected]"
}
end
end
| 26.702703 | 72 | 0.65081 |
abbe04edf6910920fbbe259932b36eb9eacd5d5a | 296 | require File.expand_path('../fantasy-irc/irc', __FILE__)
require File.expand_path('../fantasy-irc/events', __FILE__)
require File.expand_path('../fantasy-irc/plugins', __FILE__)
require File.expand_path('../fantasy-irc/rooms', __FILE__)
require File.expand_path('../fantasy-irc/users', __FILE__)
| 49.333333 | 60 | 0.763514 |
5db5b98b069d2ccfd4f5e7b89d39f589b396dbbc | 32,533 | RSpec.describe RelationshipMixin do
let(:test_rel_type) { "testing" }
# 0
# 1 2
# 3 4 5 6 7
# 8 9
let(:vms_rel_tree) { {0 => [{1 => [3, 4]}, {2 => [5, 6, {7 => [8, 9]}]}]} }
let(:vms) { build_relationship_tree(vms_rel_tree) }
# host with no tree
let(:host) { FactoryBot.create(:host) }
describe "#remove_children" do
it "handles [nil]" do
expect { vms[3].remove_children(nil) }.to_not raise_error
end
end
context "tree with relationship" do
it "#with_relationship_type and #relationship_type" do
expect(vms[0].relationship_type).not_to eq(test_rel_type)
vms[0].with_relationship_type(test_rel_type) do
expect(vms[0].relationship_type).to eq(test_rel_type)
end
expect(vms[0].parents).to be_empty
expect(vms[0].children).to be_empty
vms[0].clear_relationships_cache
expect(vms[0].with_relationship_type(test_rel_type) { |v| v.parents.length }).to eq(0)
expect(vms[0].with_relationship_type(test_rel_type) { |v| v.children.length }).to eq(2)
expect(vms[0].parents).to be_empty
expect(vms[0].children).to be_empty
end
it "#parents" do
expect(vms[0].parents).to be_empty
recurse_relationship_tree(vms_rel_tree) do |parent, child|
vms[child].with_relationship_type(test_rel_type) do |c|
expect(c.parents).to eq([vms[parent]])
end
end
end
it "#children" do
recurse_relationship_tree(vms_rel_tree) do |parent, child|
vms[parent].with_relationship_type(test_rel_type) do |p|
expect(p.children).to include vms[child]
end
end
end
# NOTE for understanding the next 4 contexts:
# Objects (VMs, Hosts, etc) have associated tree node entries in the
# relationships table which are linked. If an object must reside in
# multiple parts of the tree via having multiple parents, it will need more
# than one associated tree node.
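# Illustrative sketch (not executed by this spec): giving a VM a second parent
# under the same relationship type leaves it with a second tree node, i.e. a
# second row in the relationships table:
#
#   vm.with_relationship_type("testing") do
#     vm.add_parent(folder1)
#     vm.add_parent(folder2)
#     vm.parents # => [folder1, folder2], backed by one Relationship row per parent
#   end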
context "#set_child on a new parent object" do
before { @parent = FactoryBot.create(:vm_vmware) }
it "with a second new object will link a new tree node for the parent to a new tree node for the child" do
child = FactoryBot.create(:vm_vmware)
@parent.with_relationship_type(test_rel_type) { |v| v.set_child(child) }
assert_parent_child_structure(test_rel_type,
@parent, 1, [], [child],
child, 1, [@parent], []
)
end
it "with a root object will link a new tree node for the parent to the existing tree node for the child" do
child = vms[0]
@parent.with_relationship_type(test_rel_type) { |v| v.set_child(child) }
assert_parent_child_structure(test_rel_type,
@parent, 1, [], [child],
child, 1, [@parent], [vms[1], vms[2]]
)
end
it "with an inner object will link a new tree node for the parent to a second new tree node for the child" do
child = vms[1]
@parent.with_relationship_type(test_rel_type) { |v| v.set_child(child) }
assert_parent_child_structure(test_rel_type,
@parent, 1, [], [child],
child, 2, [vms[0], @parent], [vms[3], vms[4]]
)
end
it "with a leaf object will link a new tree node for the parent to a second new tree node for the child" do
child = vms[3]
@parent.with_relationship_type(test_rel_type) { |v| v.set_child(child) }
assert_parent_child_structure(test_rel_type,
@parent, 1, [], [child],
child, 2, [vms[1], @parent], []
)
end
end
context "#set_parent on a new child object" do
before { @child = FactoryBot.create(:vm_vmware) }
it "with a second new object will link a new tree node for the parent to a new tree node for the child" do
parent = FactoryBot.create(:vm_vmware)
@child.with_relationship_type(test_rel_type) { |v| v.set_parent(parent) }
assert_parent_child_structure(test_rel_type,
parent, 1, [], [@child],
@child, 1, [parent], []
)
end
it "with a root object will link the existing tree node for the parent to a new tree node for the child" do
parent = vms[0]
@child.with_relationship_type(test_rel_type) { |v| v.set_parent(parent) }
assert_parent_child_structure(test_rel_type,
parent, 1, [], [vms[1], vms[2], @child],
@child, 1, [parent], []
)
end
it "with an inner object will link the existing tree node for the parent to a new tree node for the child" do
parent = vms[1]
@child.with_relationship_type(test_rel_type) { |v| v.set_parent(parent) }
assert_parent_child_structure(test_rel_type,
parent, 1, [vms[0]], [vms[3], vms[4], @child],
@child, 1, [parent], []
)
end
it "with a leaf object will link the existing tree node for the parent to a new tree node for the child" do
parent = vms[3]
@child.with_relationship_type(test_rel_type) { |v| v.set_parent(parent) }
assert_parent_child_structure(test_rel_type,
parent, 1, [vms[1]], [@child],
@child, 1, [parent], []
)
end
end
context "with a new parent object, #replace_parent" do
before { @parent = FactoryBot.create(:vm_vmware) }
it "on a second new object will link a new tree node for the parent to a new tree node for the child and be the only parent for the child" do
child = FactoryBot.create(:vm_vmware)
child.with_relationship_type(test_rel_type) { |v| v.replace_parent(@parent) }
assert_parent_child_structure(test_rel_type,
@parent, 1, [], [child],
child, 1, [@parent], []
)
end
it "on a root object will link a new tree node for the parent to the existing tree node for the child and be the only parent for the child" do
child = vms[0]
child.with_relationship_type(test_rel_type) { |v| v.replace_parent(@parent) }
assert_parent_child_structure(test_rel_type,
@parent, 1, [], [child],
child, 1, [@parent], [vms[1], vms[2]]
)
end
it "on an inner object will link a new tree node for the parent to the existing tree node for the child and be the only parent for the child" do
child = vms[1]
child.with_relationship_type(test_rel_type) { |v| v.replace_parent(@parent) }
assert_parent_child_structure(test_rel_type,
@parent, 1, [], [child],
child, 1, [@parent], [vms[3], vms[4]]
)
end
it "on a leaf object will link a new tree node for the parent to the existing tree node for the child and be the only parent for the child" do
child = vms[3]
child.with_relationship_type(test_rel_type) { |v| v.replace_parent(@parent) }
assert_parent_child_structure(test_rel_type,
@parent, 1, [], [child],
child, 1, [@parent], []
)
end
end
context "#replace_parent on an inner object" do
it "with another inner object will link the existing tree node for the parent to the existing tree node for the child and be the only parent for the child " do
parent = vms[1]
child = vms[2]
child.with_relationship_type(test_rel_type) { |v| v.replace_parent(parent) }
assert_parent_child_structure(test_rel_type,
parent, 1, [vms[0]], [child, vms[3], vms[4]],
child, 1, [parent], [vms[5], vms[6], vms[7]]
)
end
end
describe "#add_parent" do
let(:folder1) { FactoryBot.create(:ems_folder) }
let(:folder2) { FactoryBot.create(:ems_folder) }
let(:vm) { FactoryBot.create(:vm) }
it "puts an object under another object" do
vm.with_relationship_type(test_rel_type) do
vm.add_parent(folder1)
expect(vm.parents).to eq([folder1])
expect(vm.parent).to eq(folder1)
end
expect(folder1.with_relationship_type(test_rel_type) { folder1.children }).to eq([vm])
end
it "allows an object to be placed under multiple parents" do
vm.with_relationship_type(test_rel_type) do
vm.add_parent(folder1)
vm.add_parent(folder2)
expect(vm.parents).to match_array([folder1, folder2])
expect { vm.parent }.to raise_error(RuntimeError, "Multiple parents found.")
end
expect(folder1.with_relationship_type(test_rel_type) { folder1.children }).to eq([vm])
expect(folder2.with_relationship_type(test_rel_type) { folder2.children }).to eq([vm])
end
end
describe "#parent=" do
let(:folder) { FactoryBot.create(:ems_folder) }
let(:vm) { FactoryBot.create(:vm) }
it "puts an object under another object" do
vm.with_relationship_type(test_rel_type) do
vm.parent = folder
expect(vm.parent).to eq(folder)
end
expect(folder.with_relationship_type(test_rel_type) { folder.children }).to eq([vm])
end
it "moves an object that already has a parent under an another object" do
vm.with_relationship_type(test_rel_type) { vm.parent = FactoryBot.create(:ems_folder) }
vm.reload
vm.with_relationship_type(test_rel_type) do
vm.parent = folder
expect(vm.parent).to eq(folder)
end
expect(folder.with_relationship_type(test_rel_type) { folder.children }).to eq([vm])
end
end
it "#replace_children" do
new_vms = (0...2).collect { FactoryBot.create(:vm_vmware) }
vms[0].with_relationship_type(test_rel_type) do |v|
v.replace_children(new_vms)
expect(new_vms).to match_array(v.children)
end
vms[1].with_relationship_type(test_rel_type) do |v|
expect(v.parents).to be_empty
end
end
it "#remove_all_parents" do
vms[1].with_relationship_type(test_rel_type) do |v|
v.remove_all_parents
expect(v.parents).to be_empty
end
end
it "#remove_all_children" do
vms[1].with_relationship_type(test_rel_type) do |v|
v.remove_all_children
expect(v.children).to be_empty
end
end
it "#remove_all_relationships" do
vms[1].with_relationship_type(test_rel_type) do |v|
v.remove_all_relationships
expect(v.parents).to be_empty
expect(v.children).to be_empty
end
end
it "#is_descendant_of?" do
expect(vms[1].with_relationship_type(test_rel_type) { |v| v.is_descendant_of?(vms[0]) }).to be_truthy
expect(vms[3].with_relationship_type(test_rel_type) { |v| v.is_descendant_of?(vms[0]) }).to be_truthy
expect(vms[2].with_relationship_type(test_rel_type) { |v| v.is_descendant_of?(vms[1]) }).not_to be_truthy
end
it "#is_ancestor_of?" do
expect(vms[0].with_relationship_type(test_rel_type) { |v| v.is_ancestor_of?(vms[1]) }).to be_truthy
expect(vms[0].with_relationship_type(test_rel_type) { |v| v.is_ancestor_of?(vms[3]) }).to be_truthy
expect(vms[2].with_relationship_type(test_rel_type) { |v| v.is_ancestor_of?(vms[1]) }).not_to be_truthy
end
it "#ancestors" do
expect(vms[0].with_relationship_type(test_rel_type) { |v| v.ancestors.empty? }).to be_truthy
expect(vms[9].with_relationship_type(test_rel_type, &:ancestors)).to match_array([vms[7], vms[2], vms[0]])
end
it "#descendants" do
expect(vms[9].with_relationship_type(test_rel_type) { |v| v.descendants.empty? }).to be_truthy
expect(vms[0].with_relationship_type(test_rel_type, &:descendants)).to match_array(vms.values - [vms[0]])
end
end
context "tree with relationship type 'ems_metadata'" do
let(:vms) { build_relationship_tree(vms_rel_tree, "ems_metadata") }
it "#detect_ancestor" do
expect(vms[8].with_relationship_type("ems_metadata") { |v| v.detect_ancestor { |a| a.id == vms[2].id } }).not_to be_nil
expect(vms[8].with_relationship_type("ems_metadata") { |v| v.detect_ancestor { |a| a.id == vms[1].id } }).to be_nil
end
end
context ".alias_with_relationship_type" do
before do
@ws = FactoryBot.create(:miq_widget_set)
@w1 = FactoryBot.create(:miq_widget)
@w2 = FactoryBot.create(:miq_widget)
@ws.add_member(@w1)
@ws.add_member(@w2)
end
it "of a method with arguments" do
@ws.remove_member(@w1)
expect(@ws.members.length).to eq(1)
end
it "of a method without arguments" do
expect(@ws.members.length).to eq(2)
end
end
describe "#root" do
it "is self with with no relationships" do
host # execute the query
expect do
nodes = host.with_relationship_type(test_rel_type, &:root)
expect(nodes).to eq(host)
end.to make_database_queries(:count => 1) # lookup the relationship node
end
it "is a self with a tree's root node" do
vms # execute the lookup query
expect do
nodes = vms[0].with_relationship_type(test_rel_type, &:root)
expect(nodes).to eq(vms[0])
end.to make_database_queries(:count => 1) # lookup the relationship node
end
it "is a parent with a tree's child node" do
nodes = vms[7].with_relationship_type(test_rel_type, &:root)
expect(nodes).to eq(vms[0])
end
end
describe "#root_id" do
it "is self with with no relationships" do
nodes = host.with_relationship_type(test_rel_type, &:root_id)
expect(nodes).to eq(["Host", host.id])
end
it "is a self with a tree's root node" do
nodes = vms[0].with_relationship_type(test_rel_type, &:root_id)
expect(nodes).to eq(["VmOrTemplate", vms[0].id])
end
it "is a parent with a tree's child node" do
nodes = vms[7].with_relationship_type(test_rel_type, &:root_id)
expect(nodes).to eq(["VmOrTemplate", vms[0].id])
end
end
# VMs override path, so we will work with host trees
describe "#path" do
it "is self with with no relationships" do
nodes = host.with_relationship_type(test_rel_type, &:path)
expect(nodes).to eq([host])
end
it "is a self with a tree's root node" do
hosts = build_relationship_tree({0 => [1, 2]}, test_rel_type, :host_vmware)
nodes = hosts[0].with_relationship_type(test_rel_type, &:path)
expect(nodes).to eq([hosts[0]])
end
it "is a parent with a tree's child node" do
hosts = build_relationship_tree({0 => [{1 => [3, 4]}, 2]}, test_rel_type, :host_vmware)
nodes = hosts[3].with_relationship_type(test_rel_type, &:path)
expect(nodes).to eq([hosts[0], hosts[1], hosts[3]])
end
end
describe "#path_id" do
it "is self with with no relationships" do
nodes = host.with_relationship_type(test_rel_type, &:path_ids)
expect(nodes).to eq([["Host", host.id]])
end
it "is a self with a tree's root node" do
hosts = build_relationship_tree({0 => [1, 2]}, test_rel_type, :host_vmware)
nodes = hosts[0].with_relationship_type(test_rel_type, &:path_ids)
expect(nodes).to eq([["Host", hosts[0].id]])
end
it "is a parent with a tree's child node" do
hosts = build_relationship_tree({0 => [{1 => [3, 4]}, 2]}, test_rel_type, :host_vmware)
nodes = hosts[3].with_relationship_type(test_rel_type, &:path_ids)
expect(nodes).to eq([["Host", hosts[0].id], ["Host", hosts[1].id], ["Host", hosts[3].id]])
end
end
describe "#path_count" do
it "is self with with no relationships" do
nodes = host.with_relationship_type(test_rel_type, &:path_count)
expect(nodes).to eq(1)
end
it "is a self with a tree's root node" do
hosts = build_relationship_tree({0 => [1, 2]}, test_rel_type, :host_vmware)
nodes = hosts[0].with_relationship_type(test_rel_type, &:path_count)
expect(nodes).to eq(1)
end
it "is a parent with a tree's child node" do
hosts = build_relationship_tree({0 => [{1 => [3, 4]}, 2]}, test_rel_type, :host_vmware)
nodes = hosts[3].with_relationship_type(test_rel_type, &:path_count)
expect(nodes).to eq(3)
end
end
describe "#ancestors" do
it "is empty with with no relationships" do
nodes = host.with_relationship_type(test_rel_type, &:ancestors)
expect(nodes).to eq([])
end
it "is empty with a tree's root node" do
nodes = vms[0].with_relationship_type(test_rel_type, &:ancestors)
expect(nodes).to eq([])
end
it "is an ancestor with child nodes" do
nodes = vms[7].with_relationship_type(test_rel_type, &:ancestors)
expect(nodes).to eq([vms[0], vms[2]])
end
end
describe "#subtree" do
it "is self with with no relationships" do
nodes = host.with_relationship_type(test_rel_type, &:subtree)
expect(nodes).to eq([host])
end
it "is the tree with a tree's root node" do
nodes = vms[0].with_relationship_type(test_rel_type, &:subtree)
expect(nodes).to match_array(vms.values)
end
it "is a subtree with a tree's child node" do
nodes = vms[2].with_relationship_type(test_rel_type, &:subtree)
expect(nodes).to match_array([vms[2], vms[5], vms[6], vms[7], vms[8], vms[9]])
end
end
describe "#subtree_arranged" do
it "is self with with no relationships" do
nodes = host.with_relationship_type(test_rel_type, &:subtree_arranged)
expect(nodes).to eq(host => {})
end
it "is the tree with a tree's root node" do
nodes = vms[0].with_relationship_type(test_rel_type, &:subtree_arranged)
expect(nodes).to eq(
vms[0] => {
vms[1] => {
vms[3] => {},
vms[4] => {}
},
vms[2] => {
vms[5] => {},
vms[6] => {},
vms[7] => {
vms[8] => {},
vms[9] => {}
}
}
}
)
end
it "is a subtree with a tree's child node" do
nodes = vms[2].with_relationship_type(test_rel_type, &:subtree_arranged)
expect(nodes).to eq(
vms[2] => {
vms[5] => {},
vms[6] => {},
vms[7] => {
vms[8] => {},
vms[9] => {}
}
}
)
end
end
describe "#subtree_ids" do
it "is self with with no relationships" do
nodes = host.with_relationship_type(test_rel_type, &:subtree_ids)
expect(nodes).to eq([["Host", host.id]])
end
it "is the tree with a tree's root node" do
nodes = vms[0].with_relationship_type(test_rel_type, &:subtree_ids)
expect(nodes).to match_array(vms.values.map { |vm| ["VmOrTemplate", vm.id] })
end
it "is a subtree with a tree's child node" do
nodes = vms[2].with_relationship_type(test_rel_type, &:subtree_ids)
expect(nodes).to match_array(
[["VmOrTemplate", vms[2].id], ["VmOrTemplate", vms[5].id], ["VmOrTemplate", vms[6].id],
["VmOrTemplate", vms[7].id], ["VmOrTemplate", vms[8].id], ["VmOrTemplate", vms[9].id]]
)
end
end
describe "#subtree_ids_arranged" do
it "is self with with no relationships" do
nodes = host.with_relationship_type(test_rel_type, &:subtree_ids_arranged)
expect(nodes).to eq([host.class.name, host.id] => {})
end
it "is the tree with a tree's root node" do
nodes = vms[0].with_relationship_type(test_rel_type, &:subtree_ids_arranged)
expect(nodes).to eq(
["VmOrTemplate", vms[0].id] => {
["VmOrTemplate", vms[1].id] => {
["VmOrTemplate", vms[3].id] => {},
["VmOrTemplate", vms[4].id] => {}
},
["VmOrTemplate", vms[2].id] => {
["VmOrTemplate", vms[5].id] => {},
["VmOrTemplate", vms[6].id] => {},
["VmOrTemplate", vms[7].id] => {
["VmOrTemplate", vms[8].id] => {},
["VmOrTemplate", vms[9].id] => {}
}
}
}
)
end
it "is a subtree with a tree's child node" do
nodes = vms[2].with_relationship_type(test_rel_type, &:subtree_ids_arranged)
expect(nodes).to eq(
["VmOrTemplate", vms[2].id] => {
["VmOrTemplate", vms[5].id] => {},
["VmOrTemplate", vms[6].id] => {},
["VmOrTemplate", vms[7].id] => {
["VmOrTemplate", vms[8].id] => {},
["VmOrTemplate", vms[9].id] => {}
}
}
)
end
end
describe "#subtree_count" do
it "is 1 with with no relationships" do
nodes = host.with_relationship_type(test_rel_type, &:subtree_count)
expect(nodes).to eq(1)
end
it "is the tree with a tree's root node" do
nodes = vms[0].with_relationship_type(test_rel_type, &:subtree_count)
expect(nodes).to eq(10)
end
it "is a subtree with a tree's child node" do
nodes = vms[2].with_relationship_type(test_rel_type, &:subtree_count)
expect(nodes).to eq(6)
end
end
describe "#descendants" do
it "is empty with with no relationships" do
nodes = host.with_relationship_type(test_rel_type, &:descendants)
expect(nodes).to eq([])
end
it "is the tree with a tree's root node" do
nodes = vms[0].with_relationship_type(test_rel_type, &:descendants)
expect(nodes).to match_array([vms[1], vms[3], vms[4], vms[2], vms[5], vms[6], vms[7], vms[8], vms[9]])
end
it "is a subtree with a tree's child node" do
nodes = vms[2].with_relationship_type(test_rel_type, &:descendants)
expect(nodes).to match_array([vms[5], vms[6], vms[7], vms[8], vms[9]])
end
end
describe "#descendants_arranged" do
it "is empty with with no relationships" do
nodes = host.with_relationship_type(test_rel_type, &:descendants_arranged)
expect(nodes).to eq({})
end
it "is the tree with a tree's root node" do
nodes = vms[0].with_relationship_type(test_rel_type, &:descendants_arranged)
expect(nodes).to eq(
vms[1] => {
vms[3] => {},
vms[4] => {}
},
vms[2] => {
vms[5] => {},
vms[6] => {},
vms[7] => {
vms[8] => {},
vms[9] => {}
}
}
)
end
it "is a subtree with a tree's child node" do
nodes = vms[2].with_relationship_type(test_rel_type, &:descendants_arranged)
expect(nodes).to eq(
vms[5] => {},
vms[6] => {},
vms[7] => {
vms[8] => {},
vms[9] => {}
}
)
end
end
describe "#descendant_ids_arranged" do
it "is empty with with no relationships" do
nodes = host.with_relationship_type(test_rel_type, &:descendant_ids_arranged)
expect(nodes).to eq({})
end
it "is the tree with a tree's root node" do
nodes = vms[0].with_relationship_type(test_rel_type, &:descendant_ids_arranged)
expect(nodes).to eq(
["VmOrTemplate", vms[1].id] => {
["VmOrTemplate", vms[3].id] => {},
["VmOrTemplate", vms[4].id] => {}
},
["VmOrTemplate", vms[2].id] => {
["VmOrTemplate", vms[5].id] => {},
["VmOrTemplate", vms[6].id] => {},
["VmOrTemplate", vms[7].id] => {
["VmOrTemplate", vms[8].id] => {},
["VmOrTemplate", vms[9].id] => {}
}
}
)
end
it "is a subtree with a tree's child node" do
nodes = vms[2].with_relationship_type(test_rel_type, &:descendant_ids_arranged)
expect(nodes).to eq(
["VmOrTemplate", vms[5].id] => {},
["VmOrTemplate", vms[6].id] => {},
["VmOrTemplate", vms[7].id] => {
["VmOrTemplate", vms[8].id] => {},
["VmOrTemplate", vms[9].id] => {}
}
)
end
end
describe "#fulltree" do
it "is self with with no relationships" do
nodes = host.with_relationship_type(test_rel_type, &:fulltree)
expect(nodes).to eq([host])
end
it "is the tree with a tree's root node" do
nodes = vms[0].with_relationship_type(test_rel_type, &:fulltree)
expect(nodes).to match_array([vms[0], vms[1], vms[3], vms[4], vms[2], vms[5], vms[6], vms[7], vms[8], vms[9]])
end
it "is the full tree with a tree's child node" do
nodes = vms[8].with_relationship_type(test_rel_type, &:fulltree)
expect(nodes).to match_array([vms[0], vms[1], vms[3], vms[4], vms[2], vms[5], vms[6], vms[7], vms[8], vms[9]])
end
end
describe "#fulltree_arranged" do
it "is self with with no relationships" do
nodes = host.with_relationship_type(test_rel_type, &:fulltree_arranged)
expect(nodes).to eq(host => {})
end
it "is the tree with a tree's root node" do
nodes = vms[0].with_relationship_type(test_rel_type, &:fulltree_arranged)
expect(nodes).to eq(
vms[0] => {
vms[1] => {
vms[3] => {},
vms[4] => {}
},
vms[2] => {
vms[5] => {},
vms[6] => {},
vms[7] => {
vms[8] => {},
vms[9] => {}
}
}
}
)
end
it "is the full tree with a tree's child node" do
nodes = vms[8].with_relationship_type(test_rel_type, &:fulltree_arranged)
expect(nodes).to eq(
vms[0] => {
vms[1] => {
vms[3] => {},
vms[4] => {}
},
vms[2] => {
vms[5] => {},
vms[6] => {},
vms[7] => {
vms[8] => {},
vms[9] => {}
}
}
}
)
end
end
describe "#fulltree_ids_arranged" do
it "is self with with no relationships" do
nodes = host.with_relationship_type(test_rel_type, &:fulltree_ids_arranged)
expect(nodes).to eq([host.class.name, host.id] => {})
end
it "is the full tree with a tree's root node" do
nodes = vms[0].with_relationship_type(test_rel_type, &:fulltree_ids_arranged)
expect(nodes).to eq(
["VmOrTemplate", vms[0].id] => {
["VmOrTemplate", vms[1].id] => {
["VmOrTemplate", vms[3].id] => {},
["VmOrTemplate", vms[4].id] => {}
},
["VmOrTemplate", vms[2].id] => {
["VmOrTemplate", vms[5].id] => {},
["VmOrTemplate", vms[6].id] => {},
["VmOrTemplate", vms[7].id] => {
["VmOrTemplate", vms[8].id] => {},
["VmOrTemplate", vms[9].id] => {}
}
}
}
)
end
it "is the full tree with a tree's child node" do
nodes = vms[8].with_relationship_type(test_rel_type, &:fulltree_ids_arranged)
expect(nodes).to eq(
["VmOrTemplate", vms[0].id] => {
["VmOrTemplate", vms[1].id] => {
["VmOrTemplate", vms[3].id] => {},
["VmOrTemplate", vms[4].id] => {}
},
["VmOrTemplate", vms[2].id] => {
["VmOrTemplate", vms[5].id] => {},
["VmOrTemplate", vms[6].id] => {},
["VmOrTemplate", vms[7].id] => {
["VmOrTemplate", vms[8].id] => {},
["VmOrTemplate", vms[9].id] => {}
}
}
}
)
end
end
describe "#fulltree_count" do
it "is self with with no relationships" do
nodes = host.with_relationship_type(test_rel_type, &:fulltree_count)
expect(nodes).to eq(1)
end
it "is the tree with a tree's root node" do
nodes = vms[0].with_relationship_type(test_rel_type, &:fulltree_count)
expect(nodes).to eq(10)
end
it "is the full tree with a tree's child node" do
nodes = vms[8].with_relationship_type(test_rel_type, &:fulltree_count)
expect(nodes).to eq(10)
end
end
describe "#parent_rels" do
it "works with relationships" do
pars = vms[8].with_relationship_type(test_rel_type, &:parent_rels)
pars_vms = pars.map(&:resource)
expect(pars_vms).to eq([vms[7]])
end
end
describe "#parent_rel_ids" do
it "works with relationships" do
ids = vms[8].with_relationship_type(test_rel_type, &:parent_rel_ids)
parent_vms = Relationship.where(:id => ids).map(&:resource)
expect(parent_vms).to eq([vms[7]])
end
it "works with cached relationships" do
ids = vms[8].with_relationship_type(test_rel_type) do |o|
# load relationships into the cache
o.all_relationships
o.parent_rel_ids
end
parent_vms = Relationship.where(:id => ids).map(&:resource)
expect(parent_vms).to eq([vms[7]])
end
end
describe "#grandchild_rels" do
it "works with relationships" do
vms[0].with_relationship_type(test_rel_type) do
rels = vms[0].grandchild_rels
expect(rels.map(&:resource)).to match_array([vms[3], vms[4], vms[5], vms[6], vms[7]])
end
end
end
describe "#grandchildren" do
it "works with relationships" do
vms[0].with_relationship_type(test_rel_type) do
expect(vms[0].grandchildren).to match_array([vms[3], vms[4], vms[5], vms[6], vms[7]])
end
end
end
describe "#child_and_grandchild_rels" do
it "works with relationships" do
vms[0].with_relationship_type(test_rel_type) do
rels = vms[0].child_and_grandchild_rels
expect(rels.map(&:resource)).to match_array([vms[1], vms[2], vms[3], vms[4], vms[5], vms[6], vms[7]])
end
end
end
protected
def build_relationship_tree(tree, rel_type = test_rel_type, base_factory = :vm_vmware)
# temp list of the relationships
# allows easy access while building
# can map to the resource to return all the resources created
rels = Hash.new do |hash, key|
hash[key] = FactoryBot.create(:relationship,
:resource => FactoryBot.create(base_factory),
:relationship => rel_type)
end
recurse_relationship_tree(tree) do |parent, child|
rels[child].parent = rels[parent]
rels[child].save!
end
# pull out all values in key order. (0, 1, 2, 3, ...) (unmemoize them on the way out)
rels.each_with_object({}) { |(n, v), h| h[n] = v.resource.tap(&:unmemoize_all) }
end
def recurse_relationship_tree(tree, &block)
parent = tree.keys.first
children = tree[parent]
children = children.collect { |child| child.kind_of?(Hash) ? recurse_relationship_tree(child, &block) : child }
children.each { |child| yield parent, child }
parent
end
def assert_parent_child_structure(rel_type, parent, p_rels_count, p_parents, p_children, child, c_rels_count, c_parents, c_children)
parent.with_relationship_type(rel_type) do
expect(parent.relationships.length).to eq(p_rels_count)
expect(parent.parents.length).to eq(p_parents.length)
expect(parent.parents).to match_array(p_parents)
expect(parent.children.length).to eq(p_children.length)
expect(parent.children).to match_array(p_children)
end
child.with_relationship_type(rel_type) do
expect(child.relationships.length).to eq(c_rels_count)
expect(child.parents.length).to eq(c_parents.length)
expect(child.parents).to match_array(c_parents)
expect(child.children.length).to eq(c_children.length)
expect(child.children).to match_array(c_children)
end
end
end
| 35.594092 | 165 | 0.582424 |
1dfa710ecccb2aab879904764dedc352c54dd12d | 115 | require 'active_json/version'
require 'active_json/query'
require 'active_json/filter'
require 'active_json/pluck'
| 23 | 29 | 0.826087 |
013187fcf57e5895ed3a56c4995583eebc5ec8c0 | 363 | module Preact
module FunctionComponent
module Mixin
def self.included(base)
base.include(::Preact::Component::Elements)
base.include(::Preact::FunctionComponent::Initializer)
base.include(::Preact::FunctionComponent::Api)
base.extend(::Preact::FunctionComponent::NativeComponentConstructor)
end
end
end
end
| 27.923077 | 76 | 0.694215 |
28d0bd987e009872e52fa856475413864fab0d3a | 826 | class FacebookBot
def send_message(data)
url = URI.parse("https://graph.facebook.com/v2.6/me/messages?access_token=#{ENV['ACCESS_TOKEN']}")
http = Net::HTTP.new(url.host, 443)
http.use_ssl = true
begin
request = Net::HTTP::Post.new(url.request_uri)
request["Content-Type"] = "application/json"
request.body = data.to_json
response = http.request(request)
body = JSON(response.body)
return { ret: body["error"].nil?, body: body }
rescue => e
raise e
end
end
def send_text_message(sender, text)
data = {
recipient: { id: sender },
message: { text: text }
}
send_message(data)
end
def send_generic_message(sender, data)
data = {
recipient: { id: sender },
message: data
}
send_message(data)
end
end
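
# Example usage (illustrative; the sender id is hypothetical and ACCESS_TOKEN
# must hold a valid page access token in the environment):
#
#   bot = FacebookBot.new
#   bot.send_text_message("1234567890", "Hello from the bot!")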
| 22.944444 | 102 | 0.613801 |
79d332c0f7bbf024db24e2ca0a170d0c8b4ed594 | 371 | # encoding: utf-8
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
module Azure::Kusto::Mgmt::V2019_11_09
module Models
#
# Defines values for IdentityType
#
module IdentityType
None = "None"
SystemAssigned = "SystemAssigned"
end
end
end
| 21.823529 | 70 | 0.698113 |
d5c96de0f1b248c94d76ac1f81c133ba8732c46a | 101 | require 'bundler/setup'
require File.expand_path "../../spec/rails_app/config/environment", __FILE__
| 33.666667 | 76 | 0.782178 |
e9a518eec01d0f649c3a450597debeda35951ab7 | 2,695 | require 'spec_helper'
require 'fixtures/pubsub'
describe Blather::Stanza::PubSub::Subscribe do
it 'registers itself' do
Blather::XMPPNode.class_from_registration(:subscribe, 'http://jabber.org/protocol/pubsub').should == Blather::Stanza::PubSub::Subscribe
end
it 'can be imported' do
Blather::XMPPNode.parse(subscribe_xml).should be_instance_of Blather::Stanza::PubSub::Subscribe
end
it 'ensures a subscribe node is present on create' do
subscribe = Blather::Stanza::PubSub::Subscribe.new :set, 'host', 'node', 'jid'
subscribe.find('//ns:pubsub/ns:subscribe', :ns => Blather::Stanza::PubSub.registered_ns).should_not be_empty
end
it 'ensures a subscribe node exists when calling #subscribe' do
subscribe = Blather::Stanza::PubSub::Subscribe.new :set, 'host', 'node', 'jid'
subscribe.pubsub.remove_children :subscribe
subscribe.find('//ns:pubsub/ns:subscribe', :ns => Blather::Stanza::PubSub.registered_ns).should be_empty
subscribe.subscribe.should_not be_nil
subscribe.find('//ns:pubsub/ns:subscribe', :ns => Blather::Stanza::PubSub.registered_ns).should_not be_empty
end
it 'defaults to a set node' do
subscribe = Blather::Stanza::PubSub::Subscribe.new :set, 'host', 'node', 'jid'
subscribe.type.should == :set
end
it 'sets the host if requested' do
subscribe = Blather::Stanza::PubSub::Subscribe.new :set, 'pubsub.jabber.local', 'node', 'jid'
subscribe.to.should == Blather::JID.new('pubsub.jabber.local')
end
it 'sets the node' do
subscribe = Blather::Stanza::PubSub::Subscribe.new :set, 'host', 'node-name', 'jid'
subscribe.node.should == 'node-name'
end
it 'has a node attribute' do
subscribe = Blather::Stanza::PubSub::Subscribe.new :set, 'host', 'node-name', 'jid'
subscribe.find('//ns:pubsub/ns:subscribe[@node="node-name"]', :ns => Blather::Stanza::PubSub.registered_ns).should_not be_empty
subscribe.node.should == 'node-name'
subscribe.node = 'new-node'
subscribe.find('//ns:pubsub/ns:subscribe[@node="new-node"]', :ns => Blather::Stanza::PubSub.registered_ns).should_not be_empty
subscribe.node.should == 'new-node'
end
it 'has a jid attribute' do
subscribe = Blather::Stanza::PubSub::Subscribe.new :set, 'host', 'node-name', 'jid'
subscribe.find('//ns:pubsub/ns:subscribe[@jid="jid"]', :ns => Blather::Stanza::PubSub.registered_ns).should_not be_empty
subscribe.jid.should == Blather::JID.new('jid')
subscribe.jid = Blather::JID.new('n@d/r')
subscribe.find('//ns:pubsub/ns:subscribe[@jid="n@d/r"]', :ns => Blather::Stanza::PubSub.registered_ns).should_not be_empty
subscribe.jid.should == Blather::JID.new('n@d/r')
end
end
| 43.467742 | 139 | 0.70167 |
18ebc7ad87f628d301ad81ccd6cddc4520082f50 | 978 | require 'spec_helper'
describe Squall::Transaction do
before(:each) do
@transaction = Squall::Transaction.new
@keys = ["pid", "created_at", "updated_at", "actor", "priority",
"parent_type", "action", "id", "user_id", "dependent_transaction_id",
"allowed_cancel", "parent_id", "params", "log_output", "status", "identifier"
]
end
describe "#list" do
around do |example|
VCR.use_cassette 'transaction/list' do
example.call
end
end
it "lists transactions" do
list = @transaction.list
list.size.should be(3)
first = list.first
first.keys.should include(*@keys)
end
end
describe "#show" do
around do |example|
VCR.use_cassette 'transaction/show' do
example.call
end
end
it "returns a transaction" do
transaction = @transaction.show(1)
transaction.keys.should include(*@keys)
transaction['pid'].should == 2180
end
end
end
| 22.227273 | 83 | 0.623722 |
b938fc139c9285f901ca9ec3d68c023b5e7087f5 | 319 | require 'spec_helper'
describe name_from_filename do
include_examples 'module'
def self.targets
%w(Generic)
end
def self.elements
[ Element::COOKIE ]
end
def issue_count
2
end
easy_test { issues.map { |i| i.var }.sort.should == %w(cookie cookie2).sort }
end
| 15.95 | 81 | 0.61442 |
d5ec7763f1e129301e1a6e612934d06522332f5f | 1,371 | # == Schema Information
#
# Table name: referentials
#
# id :bigint(8) not null, primary key
# man_day_duration_in_seconds :integer
# work_per_day_duration_in_seconds :integer
# mission_id :bigint(8)
# created_at :datetime not null
# updated_at :datetime not null
#
class Referential < ApplicationRecord
include Defaultable
include Durationable
DEFAULT_MAN_DAY_DURATION = '7h30'
DEFAULT_WORK_PER_DAY_DURATION = '1h30'
belongs_to :mission, optional: true
has_duration :man_day
has_duration :work_per_day
has_default(
man_day_duration_in_seconds: -> { default.man_day_duration_in_seconds },
work_per_day_duration_in_seconds: -> { default.work_per_day_duration_in_seconds }
)
scope :settings, -> { where mission_id: nil }
scope :default, -> { first || Referential.settings.first_or_create(man_day_duration: DEFAULT_MAN_DAY_DURATION, work_per_day_duration: DEFAULT_WORK_PER_DAY_DURATION) }
def to_s
name
end
def setting?
mission.blank?
end
def default
self.class.settings.default
end
def actual_duration_in_seconds of:
return 0 if of.zero?
(1.day.to_f/work_per_day_duration_in_seconds*of).to_i
end
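
# Illustrative arithmetic (assuming Durationable parses '1h30' as 5400 seconds):
# with the default work_per_day_duration,
#   actual_duration_in_seconds(of: 5400) # => 86_400 (a full calendar day)
#   actual_duration_in_seconds(of: 2700) # => 43_200 (half a day)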
def name
"#{work_per_day_duration}/#{man_day_duration}"
end
end
| 26.365385 | 168 | 0.687819 |
0142706d140039b54b98fa3e3e1e713713bfac3e | 16,560 | require 'spec_helper'
describe Discussion, model: true do
subject { described_class.new([first_note, second_note, third_note]) }
let(:first_note) { create(:diff_note_on_merge_request) }
let(:second_note) { create(:diff_note_on_merge_request) }
let(:third_note) { create(:diff_note_on_merge_request) }
describe "#resolvable?" do
context "when a diff discussion" do
before do
allow(subject).to receive(:diff_discussion?).and_return(true)
end
context "when all notes are unresolvable" do
before do
allow(first_note).to receive(:resolvable?).and_return(false)
allow(second_note).to receive(:resolvable?).and_return(false)
allow(third_note).to receive(:resolvable?).and_return(false)
end
it "returns false" do
expect(subject.resolvable?).to be false
end
end
context "when some notes are unresolvable and some notes are resolvable" do
before do
allow(first_note).to receive(:resolvable?).and_return(true)
allow(second_note).to receive(:resolvable?).and_return(false)
allow(third_note).to receive(:resolvable?).and_return(true)
end
it "returns true" do
expect(subject.resolvable?).to be true
end
end
context "when all notes are resolvable" do
before do
allow(first_note).to receive(:resolvable?).and_return(true)
allow(second_note).to receive(:resolvable?).and_return(true)
allow(third_note).to receive(:resolvable?).and_return(true)
end
it "returns true" do
expect(subject.resolvable?).to be true
end
end
end
context "when not a diff discussion" do
before do
allow(subject).to receive(:diff_discussion?).and_return(false)
end
it "returns false" do
expect(subject.resolvable?).to be false
end
end
end
describe "#resolved?" do
context "when not resolvable" do
before do
allow(subject).to receive(:resolvable?).and_return(false)
end
it "returns false" do
expect(subject.resolved?).to be false
end
end
context "when resolvable" do
before do
allow(subject).to receive(:resolvable?).and_return(true)
allow(first_note).to receive(:resolvable?).and_return(true)
allow(second_note).to receive(:resolvable?).and_return(false)
allow(third_note).to receive(:resolvable?).and_return(true)
end
context "when all resolvable notes are resolved" do
before do
allow(first_note).to receive(:resolved?).and_return(true)
allow(third_note).to receive(:resolved?).and_return(true)
end
it "returns true" do
expect(subject.resolved?).to be true
end
end
context "when some resolvable notes are not resolved" do
before do
allow(first_note).to receive(:resolved?).and_return(true)
allow(third_note).to receive(:resolved?).and_return(false)
end
it "returns false" do
expect(subject.resolved?).to be false
end
end
end
end
describe "#to_be_resolved?" do
context "when not resolvable" do
before do
allow(subject).to receive(:resolvable?).and_return(false)
end
it "returns false" do
expect(subject.to_be_resolved?).to be false
end
end
context "when resolvable" do
before do
allow(subject).to receive(:resolvable?).and_return(true)
allow(first_note).to receive(:resolvable?).and_return(true)
allow(second_note).to receive(:resolvable?).and_return(false)
allow(third_note).to receive(:resolvable?).and_return(true)
end
context "when all resolvable notes are resolved" do
before do
allow(first_note).to receive(:resolved?).and_return(true)
allow(third_note).to receive(:resolved?).and_return(true)
end
it "returns false" do
expect(subject.to_be_resolved?).to be false
end
end
context "when some resolvable notes are not resolved" do
before do
allow(first_note).to receive(:resolved?).and_return(true)
allow(third_note).to receive(:resolved?).and_return(false)
end
it "returns true" do
expect(subject.to_be_resolved?).to be true
end
end
end
end
describe "#can_resolve?" do
let(:current_user) { create(:user) }
context "when not resolvable" do
before do
allow(subject).to receive(:resolvable?).and_return(false)
end
it "returns false" do
expect(subject.can_resolve?(current_user)).to be false
end
end
context "when resolvable" do
before do
allow(subject).to receive(:resolvable?).and_return(true)
end
context "when not signed in" do
let(:current_user) { nil }
it "returns false" do
expect(subject.can_resolve?(current_user)).to be false
end
end
context "when signed in" do
context "when the signed in user is the noteable author" do
before do
subject.noteable.author = current_user
end
it "returns true" do
expect(subject.can_resolve?(current_user)).to be true
end
end
context "when the signed in user can push to the project" do
before do
subject.project.team << [current_user, :master]
end
it "returns true" do
expect(subject.can_resolve?(current_user)).to be true
end
end
context "when the signed in user is a random user" do
it "returns false" do
expect(subject.can_resolve?(current_user)).to be false
end
end
end
end
end
describe "#resolve!" do
let(:current_user) { create(:user) }
context "when not resolvable" do
before do
allow(subject).to receive(:resolvable?).and_return(false)
end
it "returns nil" do
expect(subject.resolve!(current_user)).to be_nil
end
it "doesn't set resolved_at" do
subject.resolve!(current_user)
expect(subject.resolved_at).to be_nil
end
it "doesn't set resolved_by" do
subject.resolve!(current_user)
expect(subject.resolved_by).to be_nil
end
it "doesn't mark as resolved" do
subject.resolve!(current_user)
expect(subject.resolved?).to be false
end
end
context "when resolvable" do
let(:user) { create(:user) }
let(:second_note) { create(:diff_note_on_commit) } # unresolvable
before do
allow(subject).to receive(:resolvable?).and_return(true)
end
context "when all resolvable notes are resolved" do
before do
first_note.resolve!(user)
third_note.resolve!(user)
first_note.reload
third_note.reload
end
it "doesn't change resolved_at on the resolved notes" do
expect(first_note.resolved_at).not_to be_nil
expect(third_note.resolved_at).not_to be_nil
expect { subject.resolve!(current_user) }.not_to change { first_note.resolved_at }
expect { subject.resolve!(current_user) }.not_to change { third_note.resolved_at }
end
it "doesn't change resolved_by on the resolved notes" do
expect(first_note.resolved_by).to eq(user)
expect(third_note.resolved_by).to eq(user)
expect { subject.resolve!(current_user) }.not_to change { first_note.resolved_by }
expect { subject.resolve!(current_user) }.not_to change { third_note.resolved_by }
end
it "doesn't change the resolved state on the resolved notes" do
expect(first_note.resolved?).to be true
expect(third_note.resolved?).to be true
expect { subject.resolve!(current_user) }.not_to change { first_note.resolved? }
expect { subject.resolve!(current_user) }.not_to change { third_note.resolved? }
end
it "doesn't change resolved_at" do
expect(subject.resolved_at).not_to be_nil
expect { subject.resolve!(current_user) }.not_to change { subject.resolved_at }
end
it "doesn't change resolved_by" do
expect(subject.resolved_by).to eq(user)
expect { subject.resolve!(current_user) }.not_to change { subject.resolved_by }
end
it "doesn't change resolved state" do
expect(subject.resolved?).to be true
expect { subject.resolve!(current_user) }.not_to change { subject.resolved? }
end
end
context "when some resolvable notes are resolved" do
before do
first_note.resolve!(user)
end
it "doesn't change resolved_at on the resolved note" do
expect(first_note.resolved_at).not_to be_nil
expect { subject.resolve!(current_user) }.
not_to change { first_note.reload.resolved_at }
end
it "doesn't change resolved_by on the resolved note" do
expect(first_note.resolved_by).to eq(user)
expect { subject.resolve!(current_user) }.
not_to change { first_note.reload && first_note.resolved_by }
end
it "doesn't change the resolved state on the resolved note" do
expect(first_note.resolved?).to be true
expect { subject.resolve!(current_user) }.
not_to change { first_note.reload && first_note.resolved? }
end
it "sets resolved_at on the unresolved note" do
subject.resolve!(current_user)
third_note.reload
expect(third_note.resolved_at).not_to be_nil
end
it "sets resolved_by on the unresolved note" do
subject.resolve!(current_user)
third_note.reload
expect(third_note.resolved_by).to eq(current_user)
end
it "marks the unresolved note as resolved" do
subject.resolve!(current_user)
third_note.reload
expect(third_note.resolved?).to be true
end
it "sets resolved_at" do
subject.resolve!(current_user)
expect(subject.resolved_at).not_to be_nil
end
it "sets resolved_by" do
subject.resolve!(current_user)
expect(subject.resolved_by).to eq(current_user)
end
it "marks as resolved" do
subject.resolve!(current_user)
expect(subject.resolved?).to be true
end
end
context "when no resolvable notes are resolved" do
it "sets resolved_at on the unresolved notes" do
subject.resolve!(current_user)
first_note.reload
third_note.reload
expect(first_note.resolved_at).not_to be_nil
expect(third_note.resolved_at).not_to be_nil
end
it "sets resolved_by on the unresolved notes" do
subject.resolve!(current_user)
first_note.reload
third_note.reload
expect(first_note.resolved_by).to eq(current_user)
expect(third_note.resolved_by).to eq(current_user)
end
it "marks the unresolved notes as resolved" do
subject.resolve!(current_user)
first_note.reload
third_note.reload
expect(first_note.resolved?).to be true
expect(third_note.resolved?).to be true
end
it "sets resolved_at" do
subject.resolve!(current_user)
first_note.reload
third_note.reload
expect(subject.resolved_at).not_to be_nil
end
it "sets resolved_by" do
subject.resolve!(current_user)
first_note.reload
third_note.reload
expect(subject.resolved_by).to eq(current_user)
end
it "marks as resolved" do
subject.resolve!(current_user)
first_note.reload
third_note.reload
expect(subject.resolved?).to be true
end
end
end
end
describe "#unresolve!" do
context "when not resolvable" do
before do
allow(subject).to receive(:resolvable?).and_return(false)
end
it "returns nil" do
expect(subject.unresolve!).to be_nil
end
end
context "when resolvable" do
let(:user) { create(:user) }
before do
allow(subject).to receive(:resolvable?).and_return(true)
allow(first_note).to receive(:resolvable?).and_return(true)
allow(second_note).to receive(:resolvable?).and_return(false)
allow(third_note).to receive(:resolvable?).and_return(true)
end
context "when all resolvable notes are resolved" do
before do
first_note.resolve!(user)
third_note.resolve!(user)
end
it "unsets resolved_at on the resolved notes" do
subject.unresolve!
first_note.reload
third_note.reload
expect(first_note.resolved_at).to be_nil
expect(third_note.resolved_at).to be_nil
end
it "unsets resolved_by on the resolved notes" do
subject.unresolve!
first_note.reload
third_note.reload
expect(first_note.resolved_by).to be_nil
expect(third_note.resolved_by).to be_nil
end
it "unmarks the resolved notes as resolved" do
subject.unresolve!
first_note.reload
third_note.reload
expect(first_note.resolved?).to be false
expect(third_note.resolved?).to be false
end
it "unsets resolved_at" do
subject.unresolve!
first_note.reload
third_note.reload
expect(subject.resolved_at).to be_nil
end
it "unsets resolved_by" do
subject.unresolve!
first_note.reload
third_note.reload
expect(subject.resolved_by).to be_nil
end
it "unmarks as resolved" do
subject.unresolve!
expect(subject.resolved?).to be false
end
end
context "when some resolvable notes are resolved" do
before do
first_note.resolve!(user)
end
it "unsets resolved_at on the resolved note" do
subject.unresolve!
expect(subject.first_note.resolved_at).to be_nil
end
it "unsets resolved_by on the resolved note" do
subject.unresolve!
expect(subject.first_note.resolved_by).to be_nil
end
it "unmarks the resolved note as resolved" do
subject.unresolve!
expect(subject.first_note.resolved?).to be false
end
end
end
end
describe "#collapsed?" do
context "when a diff discussion" do
before do
allow(subject).to receive(:diff_discussion?).and_return(true)
end
context "when resolvable" do
before do
allow(subject).to receive(:resolvable?).and_return(true)
end
context "when resolved" do
before do
allow(subject).to receive(:resolved?).and_return(true)
end
it "returns true" do
expect(subject.collapsed?).to be true
end
end
context "when not resolved" do
before do
allow(subject).to receive(:resolved?).and_return(false)
end
it "returns false" do
expect(subject.collapsed?).to be false
end
end
end
context "when not resolvable" do
before do
allow(subject).to receive(:resolvable?).and_return(false)
end
context "when active" do
before do
allow(subject).to receive(:active?).and_return(true)
end
it "returns false" do
expect(subject.collapsed?).to be false
end
end
context "when outdated" do
before do
allow(subject).to receive(:active?).and_return(false)
end
it "returns true" do
expect(subject.collapsed?).to be true
end
end
end
end
context "when not a diff discussion" do
before do
allow(subject).to receive(:diff_discussion?).and_return(false)
end
it "returns false" do
expect(subject.collapsed?).to be false
end
end
end
end
| 27.878788 | 92 | 0.61401 |
4a82eacd5380b821609df2efed5bb91d93cd25ff | 1,114 | Dummy::Application.configure do
# Settings specified here will take precedence over those in config/application.rb.
# In the development environment your application's code is reloaded on
# every request. This slows down response time but is perfect for development
# since you don't have to restart the web server when you make code changes.
config.cache_classes = false
# Do not eager load code on boot.
config.eager_load = false
# Show full error reports and disable caching.
config.consider_all_requests_local = true
config.action_controller.perform_caching = false
# Don't care if the mailer can't send.
config.action_mailer.raise_delivery_errors = false
# Print deprecation notices to the Rails logger.
config.active_support.deprecation = :log
# Raise an error on page load if there are pending migrations.
config.active_record.migration_error = :page_load
# Debug mode disables concatenation and preprocessing of assets.
# This option may cause significant delays in view rendering with a large
# number of complex assets.
config.assets.debug = true
end
| 37.133333 | 85 | 0.771095 |
e9ea54a5966f54f92ad86748a7c2ba544695b592 | 526 | module AssetCloud
class InvalidBucketError < StandardError
end
class InvalidBucket < Bucket
Error = "No such namespace: %s".freeze
def ls(namespace)
raise InvalidBucketError, Error % namespace
end
def read(key)
raise InvalidBucketError, Error % key
end
def write(key, data)
raise InvalidBucketError, Error % key
end
def delete(key)
raise InvalidBucketError, Error % key
end
def stat(key)
raise InvalidBucketError, Error % key
end
end
end
| 18.137931 | 49 | 0.663498 |
1d9faa32a2b4e2968b837cec85430375bfebe396 | 1,544 | require 'rescue_unique_constraint/version'
require 'rescue_unique_constraint/index'
require 'rescue_unique_constraint/rescue_handler'
require 'rescue_unique_constraint/adapter/mysql_adapter'
require 'rescue_unique_constraint/adapter/postgresql_adapter'
require 'rescue_unique_constraint/adapter/sqlite_adapter'
require 'active_record'
# Module that rescues ActiveRecord::RecordNotUnique exceptions
# and adds validation errors for indexes that are registered with
# rescue_unique_constraint(index:, field:, scope:)
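#
# A minimal usage sketch (the Widget model, table and index names are
# hypothetical; only the macro call itself is provided by this module):
#
#   class Widget < ActiveRecord::Base
#     include RescueUniqueConstraint
#     rescue_unique_constraint index: 'index_widgets_on_name', field: 'name', scope: nil
#   end
#
#   Widget.create(name: 'dup')           # first insert succeeds
#   widget = Widget.create(name: 'dup')  # duplicate violates the unique index
#   widget.errors[:name]                 # expected to contain a :taken error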
module RescueUniqueConstraint
def self.included(base)
base.extend(ClassMethods)
end
# methods mixed into ActiveRecord class
module ClassMethods
def index_rescue_handler
@_index_rescue_handler ||= RescueUniqueConstraint::RescueHandler.new(self)
end
def rescue_unique_constraint(index:, field:, scope:)
unless method_defined?(:create_or_update_with_rescue)
define_method(:create_or_update_with_rescue) do |*|
begin
create_or_update_without_rescue
rescue ActiveRecord::RecordNotUnique => e
self.class.index_rescue_handler.matching_indexes(e).each do |matching_index|
errors.add(matching_index.field, :taken, scope: matching_index.scope)
end
return false
end
true
end
alias_method :create_or_update_without_rescue, :create_or_update
alias_method :create_or_update, :create_or_update_with_rescue
end
index_rescue_handler.add_index(index, field)
end
end
end
| 35.090909 | 88 | 0.751943 |
79d22a506b11b3c50306e04fa2af86fbbac1a0ed | 2,927 | require_relative '../../spec_helper'
require_relative 'fixtures/classes'
describe "Module#class_variable_get" do
it "returns the value of the class variable with the given name" do
c = Class.new { class_variable_set :@@class_var, "test" }
c.send(:class_variable_get, :@@class_var).should == "test"
c.send(:class_variable_get, "@@class_var").should == "test"
end
it "returns the value of a class variable with the given name defined in an included module" do
c = Class.new { include ModuleSpecs::MVars }
c.send(:class_variable_get, "@@mvar").should == :mvar
end
it "raises a NameError for a class variable named '@@'" do
c = Class.new
-> { c.send(:class_variable_get, "@@") }.should raise_error(NameError)
-> { c.send(:class_variable_get, :"@@") }.should raise_error(NameError)
end
it "raises a NameError for a class variables with the given name defined in an extended module" do
c = Class.new
c.extend ModuleSpecs::MVars
-> {
c.send(:class_variable_get, "@@mvar")
}.should raise_error(NameError)
end
it "returns class variables defined in the class body and accessed in the metaclass" do
ModuleSpecs::CVars.cls.should == :class
end
it "returns class variables defined in the metaclass and accessed by class methods" do
ModuleSpecs::CVars.meta.should == :metainfo
end
it "returns class variables defined in the metaclass and accessed by instance methods" do
ModuleSpecs::CVars.new.meta.should == :metainfo
end
it "returns a class variable defined in a metaclass" do
obj = mock("metaclass class variable")
meta = obj.singleton_class
meta.send :class_variable_set, :@@var, :cvar_value
meta.send(:class_variable_get, :@@var).should == :cvar_value
end
it "raises a NameError when an uninitialized class variable is accessed" do
c = Class.new
[:@@no_class_var, "@@no_class_var"].each do |cvar|
-> { c.send(:class_variable_get, cvar) }.should raise_error(NameError)
end
end
it "raises a NameError when the given name is not allowed" do
c = Class.new
-> { c.send(:class_variable_get, :invalid_name) }.should raise_error(NameError)
-> { c.send(:class_variable_get, "@invalid_name") }.should raise_error(NameError)
end
it "converts a non string/symbol/fixnum name to string using to_str" do
c = Class.new { class_variable_set :@@class_var, "test" }
(o = mock('@@class_var')).should_receive(:to_str).and_return("@@class_var")
c.send(:class_variable_get, o).should == "test"
end
it "raises a TypeError when the given names can't be converted to strings using to_str" do
c = Class.new { class_variable_set :@@class_var, "test" }
o = mock('123')
-> { c.send(:class_variable_get, o) }.should raise_error(TypeError)
o.should_receive(:to_str).and_return(123)
-> { c.send(:class_variable_get, o) }.should raise_error(TypeError)
end
end
| 38.012987 | 100 | 0.696959 |
e9c7aa69a122dcb1b45bec9e7ae40d48f96694b5 | 7,416 | module Bosh::Cli
module Command
class Base
extend Bosh::Cli::CommandDiscovery
include Bosh::Cli::DeploymentHelper
attr_accessor :options, :out, :args
attr_reader :work_dir, :exit_code, :runner
DEFAULT_DIRECTOR_PORT = 25555
# @param [Bosh::Cli::Runner] runner
def initialize(runner = nil, director = nil)
@runner = runner
@director = director
@options = {}
@work_dir = Dir.pwd
@exit_code = 0
@out = nil
@args = []
end
# @return [Bosh::Cli::Config] Current configuration
def config
@config ||= begin
# Handle the environment variable being set to the empty string.
env_bosh_config = ENV['BOSH_CONFIG'].to_s.empty? ? nil : ENV['BOSH_CONFIG']
config_file = options[:config] || env_bosh_config || Bosh::Cli::DEFAULT_CONFIG_PATH
Bosh::Cli::Config.new(config_file)
end
end
def add_option(name, value)
@options[name] = value
end
def remove_option(name)
@options.delete(name)
end
def director
@director ||= Bosh::Cli::Client::Director.new(
target, credentials, @options.select { |k, _| k == :no_track })
end
def release
return @release if @release
check_if_release_dir
@release = Bosh::Cli::Release.new(work_dir, options[:final])
end
def progress_renderer
interactive? ? Bosh::Cli::InteractiveProgressRenderer.new : Bosh::Cli::NonInteractiveProgressRenderer.new
end
def blob_manager
@blob_manager ||= Bosh::Cli::BlobManager.new(release, config.max_parallel_downloads, progress_renderer)
end
def blobstore
release.blobstore
end
def logged_in?
!!(credentials && credentials.authorization_header)
end
def non_interactive?
options[:non_interactive]
end
def interactive?
!non_interactive?
end
def verbose?
@options[:verbose]
end
def redirect(*args)
Bosh::Cli::Runner.new(args, @options).run
end
def confirmed?(question = 'Are you sure?')
return true if non_interactive?
ask("#{question} (type 'yes' to continue): ") == 'yes'
end
# @return [String] Target director URL
def target
raw_url = options[:target] || config.target
url = config.resolve_alias(:target, raw_url) || raw_url
url ? normalize_url(url) : nil
end
alias_method :target_url, :target
# @return [String] Deployment manifest path
def deployment
options[:deployment] || config.deployment
end
def credentials
return @credentials if @credentials
if auth_info.uaa?
token_decoder = Client::Uaa::TokenDecoder.new
uaa_token_provider = Client::Uaa::TokenProvider.new(auth_info, config, token_decoder, target)
@credentials = Client::UaaCredentials.new(uaa_token_provider)
elsif username && password
@credentials = Client::BasicCredentials.new(username, password)
end
@credentials
end
def target_name
options[:target] || config.target_name || target_url
end
def cache_dir
File.join(Dir.home, '.bosh', 'cache')
end
def show_current_state(deployment_name=nil)
user_desc = auth_info.client_auth? ? 'client' : 'user'
msg = "Acting as #{user_desc} '#{credentials.username.to_s.make_green}'"
msg += " on deployment '#{deployment_name.make_green}'" if deployment_name
msg += " on '#{target_name.make_green}'" if target_name
warn(msg)
end
protected
def auth_info
@auth_info ||= begin
director_client = Client::Director.new(target)
Client::Uaa::AuthInfo.new(director_client, ENV, config.ca_cert(target))
end
end
# @return [String] Director username
def username
options[:username] || ENV['BOSH_USER'] || config.username(target)
end
# @return [String] Director password
def password
options[:password] || ENV['BOSH_PASSWORD'] || config.password(target)
end
      # Prints the director task completion report. Note that the event log
      # usually contains a fairly detailed error report and other UI niceties,
      # so most of the time this can just do nothing.
# @param [Symbol] status Task status
# @param [#to_s] task_id Task ID
def task_report(status, task_id, success_msg = nil, error_msg = nil)
case status
when :non_trackable
report = "Can't track director task".make_red
when :track_timeout
report = 'Task tracking timeout'.make_red
when :running
report = "Task #{task_id.make_yellow} running"
when :error
report = error_msg
when :done
report = success_msg
else
report = "Task exited with status #{status}"
end
unless [:running, :done].include?(status)
@exit_code = 1
end
say("\n#{report}") if report
say("\nFor a more detailed error report, run: bosh task #{task_id} --debug") if status == :error
end
def auth_required
target_required
err('Please log in first') unless logged_in?
end
def target_required
err('Please choose target first') if target.nil?
end
def deployment_required
err('Please choose deployment first') if deployment.nil?
end
def show_deployment
say("Current deployment is #{deployment.make_green}")
end
def no_track_unsupported
if @options.delete(:no_track)
say('Ignoring `' + '--no-track'.make_yellow + "' option")
end
end
def check_if_release_dir
unless in_release_dir?
          err("Sorry, your current directory doesn't look like a release directory")
end
end
def raise_dirty_state_error
say("\n%s\n" % [`git status`])
err('Your current directory has some local modifications, ' +
"please discard or commit them first.\n\n" +
'Use the --force option to skip this check.')
end
def in_release_dir?
File.directory?('packages') &&
File.directory?('jobs') &&
File.directory?('src')
end
def dirty_state?
git_status = `git status 2>&1`
case $?.exitstatus
when 128 # Not in a git repo
false
when 127 # git command not found
false
else
!git_status.lines.to_a.last.include?('nothing to commit')
end
end
def valid_index_for(manifest_hash, job, index, options = {})
index = '0' if job_unique_in_deployment?(manifest_hash, job)
err('You should specify the job index. There is more than one instance of this job type.') if index.nil?
index = index.to_i if options[:integer_index]
index
end
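      # Illustrative results, derived from the logic below (assuming the
      # default director port of 25555):
      #   normalize_url('example.com')         # => "https://example.com:25555"
      #   normalize_url('example.com:8443')    # => "https://example.com:8443"
      #   normalize_url('http://example.com/') # => "http://example.com:25555"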
def normalize_url(url)
url = url.gsub(/\/$/, '')
url = "https://#{url}" unless url.match(/^http:?/)
uri = URI.parse(url)
if port = url.match(/:(\d+)$/)
port_number = port.captures[0].to_i
if port_number == URI::HTTPS::DEFAULT_PORT
uri.to_s + ":#{URI::HTTPS::DEFAULT_PORT}"
else
uri.port = port_number
uri.to_s
end
else
uri.port = DEFAULT_DIRECTOR_PORT
uri.to_s
end
end
end
end
end
| 28.197719 | 113 | 0.612325 |
1d89430cc5cdc68d7a502c9739626d21b6cd7100 | 2,255 | # frozen_string_literal: true
module Geo
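  # A rough usage sketch (the replicator class and uploader below are
  # hypothetical; an includer must at least implement #carrierwave_uploader):
  #
  #   class PackageFileReplicator < Gitlab::Geo::Replicator
  #     include ::Geo::BlobReplicatorStrategy
  #
  #     def carrierwave_uploader
  #       model_record.file
  #     end
  #   end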
module BlobReplicatorStrategy
extend ActiveSupport::Concern
include Delay
include Gitlab::Geo::LogHelpers
included do
event :created
end
class_methods do
end
def handle_after_create_commit
publish(:created, **created_params)
return unless Feature.enabled?(:geo_self_service_framework)
schedule_checksum_calculation if needs_checksum?
end
# Called by Gitlab::Geo::Replicator#consume
def consume_created_event
download
end
# Return the carrierwave uploader instance scoped to current model
#
# @abstract
# @return [Carrierwave::Uploader]
def carrierwave_uploader
raise NotImplementedError
end
def calculate_checksum!
checksum = model_record.calculate_checksum!
update_verification_state!(checksum: checksum)
rescue => e
log_error('Error calculating the checksum', e)
update_verification_state!(failure: e.message)
end
# Check if given checksum matches known one
#
# @param [String] checksum
# @return [Boolean] whether checksum matches
def matches_checksum?(checksum)
model_record.verification_checksum == checksum
end
private
def update_verification_state!(checksum: nil, failure: nil)
retry_at, retry_count = calculate_next_retry_attempt if failure.present?
model_record.update!(
verification_checksum: checksum,
verified_at: Time.now,
verification_failure: failure,
verification_retry_at: retry_at,
verification_retry_count: retry_count
)
end
def calculate_next_retry_attempt
retry_count = model_record.verification_retry_count.to_i + 1
[next_retry_time(retry_count), retry_count]
end
def download
::Geo::BlobDownloadService.new(replicator: self).execute
end
def schedule_checksum_calculation
Geo::BlobVerificationPrimaryWorker.perform_async(replicable_name, model_record.id)
end
def created_params
{ model_record_id: model_record.id }
end
def needs_checksum?
return true unless model_record.respond_to?(:needs_checksum?)
model_record.needs_checksum?
end
end
end
| 24.51087 | 88 | 0.711308 |
1c16a6a65413aece10018c469e6dbe3ac6dc7315 | 724 | require 'rubygems'
require 'sinatra'
require 'sinatra/json'
require "sinatra/reloader" if development?
require 'net/http'
require 'net/https'
require 'uri'
require 'json'
require 'time'
require_relative './places'
require_relative './weather'
require_relative './google_maps'
DIRECTIONS_URI = URI("https://maps.googleapis.com/maps/api/directions/json")
PLACES_URI = URI("https://maps.googleapis.com/maps/api/place/nearbysearch/json")
GEOCODE_URI = URI("https://maps.googleapis.com/maps/api/geocode/json")
API_KEY = "AIzaSyA4aFp7KfGDr9g6MEems8GjxtRqufkZBFE"
get '/' do
haml :index
end
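# The POST handler below expects a JSON body; only the "address" key is read.
# An assumed example payload:
#   { "address": "350 5th Ave, New York, NY" }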
post '/' do
request.body.rewind
data = JSON.parse request.body.read
json Places.grocery(data["address"])
end
| 24.133333 | 85 | 0.741713 |
e27abf6091242afa7359d144811b1ee4ce8fc41f | 1,009 | name 'openstack-object-storage'
maintainer 'ATT, Inc.'
license 'Apache 2.0'
description 'Installs and configures Openstack Swift'
long_description IO.read(File.join(File.dirname(__FILE__), 'README.md'))
version '9.0.3'
recipe 'openstack-object-storage::account-server', 'Installs the swift account server'
recipe 'openstack-object-storage::client', 'Install the switch client'
recipe 'openstack-object-storage::container-server', 'Installs the swift container server'
recipe 'openstack-object-storage::object-server', 'Installs the swift object server'
recipe 'openstack-object-storage::proxy-server', 'Installs the swift proxy server'
recipe 'openstack-object-storage::setup', 'Does initial setup of a swift cluster'
%w{ centos ubuntu }.each do |os|
supports os
end
depends 'memcached', '>= 1.7.2'
depends 'statsd', '>= 0.1.5'
depends 'apt', '>= 2.3.8'
depends 'openstack-common', '~> 9.0'
| 43.869565 | 101 | 0.667988 |
ff308c34dfc5adf53666eb423a90921e4e909ed6 | 1,064 | # coding: utf-8
require File.dirname(__FILE__) + '/spec_helper.rb'
describe ONIX::Subject do
before(:each) do
data_path = File.join(File.dirname(__FILE__),"..","data")
file1 = File.join(data_path, "subject.xml")
@doc = Nokogiri::XML::Document.parse(File.read(file1))
@root = @doc.root
end
it "should correctly convert to a string" do
sub = ONIX::Subject.from_xml(@root.to_s)
sub.to_xml.to_s[0,9].should eql("<Subject>")
end
it "should provide read access to first level attributes" do
sub = ONIX::Subject.from_xml(@root.to_s)
sub.subject_scheme_id.should eql(3)
sub.subject_scheme_name.should eql("RBA Subjects")
sub.subject_code.should eql("AABB")
end
it "should provide write access to first level attributes" do
sub = ONIX::Subject.new
sub.subject_scheme_id = 2
sub.to_xml.to_s.include?("<SubjectSchemeIdentifier>02</SubjectSchemeIdentifier>").should be_true
sub.subject_code = "ABCD"
sub.to_xml.to_s.include?("<SubjectCode>ABCD</SubjectCode>").should be_true
end
end
| 28 | 100 | 0.697368 |
1a46e64bd6b334b7d2c0ce0f42ac7983af340577 | 1,169 | Given(/^the following documents exist:$/) do |table|
table.hashes.each do |hash|
    document = Document.create(hash)
    document.save
end
end
When(/^I click the "([^"]*)" button for document "([^"]*)"$/) do |button, document_name|
document = Document.find_by_title(document_name)
if document
within("tr##{document.id}") do
click_link_or_button button
end
else
visit path_to(button, 'non-existent')
end
end
When(/^I should not see the document "([^"]*)"$/) do |title|
page.should have_text title, visible: false
end
#| Howto | How to start | 2 | 55 | 33 |
#| Another doc | My content | 2 | 66 | 33 |
#| Howto 2 | My documentation | 1 | 77 | 44 |
#When(/^I click the "([^"]*)" button for project "([^"]*)"$/) do |button, project_name|
# project = Project.find_by_title(project_name)
# if project
# within("tr##{project.id}") do
# click_link_or_button button
# end
# else
# visit path_to(button, 'non-existent')
# end
#end
When(/^I click the sidebar link "([^"]*)"$/) do |link|
within('ul#sidebar') do
click_link_or_button link
end
end | 27.833333 | 88 | 0.596236 |
4a6fd2c02a86ffe407275025cf2bf889fb16dbb9 | 2,855 | # -*- encoding: utf-8 -*-
# stub: rails 5.1.5 ruby lib
Gem::Specification.new do |s|
s.name = "rails"
s.version = "5.1.5"
s.required_rubygems_version = Gem::Requirement.new(">= 1.8.11") if s.respond_to? :required_rubygems_version=
s.require_paths = ["lib"]
s.authors = ["David Heinemeier Hansson"]
s.date = "2018-02-14"
s.description = "Ruby on Rails is a full-stack web framework optimized for programmer happiness and sustainable productivity. It encourages beautiful code by favoring convention over configuration."
s.email = "[email protected]"
s.homepage = "http://rubyonrails.org"
s.licenses = ["MIT"]
s.required_ruby_version = Gem::Requirement.new(">= 2.2.2")
s.rubygems_version = "2.5.1"
s.summary = "Full-stack web application framework."
s.installed_by_version = "2.5.1" if s.respond_to? :installed_by_version
if s.respond_to? :specification_version then
s.specification_version = 4
if Gem::Version.new(Gem::VERSION) >= Gem::Version.new('1.2.0') then
s.add_runtime_dependency(%q<activesupport>, ["= 5.1.5"])
s.add_runtime_dependency(%q<actionpack>, ["= 5.1.5"])
s.add_runtime_dependency(%q<actionview>, ["= 5.1.5"])
s.add_runtime_dependency(%q<activemodel>, ["= 5.1.5"])
s.add_runtime_dependency(%q<activerecord>, ["= 5.1.5"])
s.add_runtime_dependency(%q<actionmailer>, ["= 5.1.5"])
s.add_runtime_dependency(%q<activejob>, ["= 5.1.5"])
s.add_runtime_dependency(%q<actioncable>, ["= 5.1.5"])
s.add_runtime_dependency(%q<railties>, ["= 5.1.5"])
s.add_runtime_dependency(%q<bundler>, [">= 1.3.0"])
s.add_runtime_dependency(%q<sprockets-rails>, [">= 2.0.0"])
else
s.add_dependency(%q<activesupport>, ["= 5.1.5"])
s.add_dependency(%q<actionpack>, ["= 5.1.5"])
s.add_dependency(%q<actionview>, ["= 5.1.5"])
s.add_dependency(%q<activemodel>, ["= 5.1.5"])
s.add_dependency(%q<activerecord>, ["= 5.1.5"])
s.add_dependency(%q<actionmailer>, ["= 5.1.5"])
s.add_dependency(%q<activejob>, ["= 5.1.5"])
s.add_dependency(%q<actioncable>, ["= 5.1.5"])
s.add_dependency(%q<railties>, ["= 5.1.5"])
s.add_dependency(%q<bundler>, [">= 1.3.0"])
s.add_dependency(%q<sprockets-rails>, [">= 2.0.0"])
end
else
s.add_dependency(%q<activesupport>, ["= 5.1.5"])
s.add_dependency(%q<actionpack>, ["= 5.1.5"])
s.add_dependency(%q<actionview>, ["= 5.1.5"])
s.add_dependency(%q<activemodel>, ["= 5.1.5"])
s.add_dependency(%q<activerecord>, ["= 5.1.5"])
s.add_dependency(%q<actionmailer>, ["= 5.1.5"])
s.add_dependency(%q<activejob>, ["= 5.1.5"])
s.add_dependency(%q<actioncable>, ["= 5.1.5"])
s.add_dependency(%q<railties>, ["= 5.1.5"])
s.add_dependency(%q<bundler>, [">= 1.3.0"])
s.add_dependency(%q<sprockets-rails>, [">= 2.0.0"])
end
end
| 44.609375 | 200 | 0.629772 |
f78112f701beda2167004393e2e41165a8118aa6 | 1,227 | require 'spec_helper'
describe Travis::Build::Script::C, :sexp do
let(:data) { payload_for(:push, :c) }
let(:script) { described_class.new(data) }
subject { script.sexp }
it { store_example }
it_behaves_like 'a bash script'
it_behaves_like 'compiled script' do
let(:code) { ['TRAVIS_LANGUAGE=c'] }
let(:cmds) { ['./configure && make && make test'] }
end
it_behaves_like 'a build script sexp'
it 'sets CC' do
should include_sexp [:export, ['CC', 'gcc'], echo: true]
end
it 'announces gcc --version' do
should include_sexp [:cmd, 'gcc --version', echo: true]
end
it 'runs ./configure && make && make test' do
should include_sexp [:cmd, './configure && make && make test', echo: true, timing: true]
end
describe '#cache_slug' do
subject { described_class.new(data).cache_slug }
it { should eq("cache-#{CACHE_SLUG_EXTRAS}--compiler-gcc") }
end
context 'when cache requires ccache' do
let(:data) { payload_for(:push, :c, config: { cache: 'ccache' }) }
describe '#export' do
it 'prepends /usr/lib/ccache to PATH' do
should include_sexp [:export, ['PATH', '/usr/lib/ccache:$PATH'], echo: true]
end
end
end
end
| 26.673913 | 92 | 0.629177 |
21b1fd2c2eac2260bf7639845ed8ad1fdd7532a5 | 848 | control 'cis-docker-benchmark-2.7' do
impact 1.0
title 'Set default ulimit as appropriate'
  desc 'ulimit provides control over the resources available to the shell and to processes started by it. Setting system resource limits judiciously saves you from many disasters such as a fork bomb. Sometimes, even friendly users and legitimate processes can overuse system resources and in turn can make the system unusable.'
tag 'Bug: default-ulimits seems broken in daemon.json https://github.com/docker/docker/issues/22309'
ref 'https://docs.docker.com/engine/reference/commandline/daemon/#default-ulimits'
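  # Illustrative /etc/docker/daemon.json fragment that satisfies the checks
  # below (values mirror the test expectations, not a recommendation):
  #   { "default-ulimits": { "nproc": "1024:2408", "nofile": "100:200" } }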
describe json('/etc/docker/daemon.json') do
its(['default-ulimits', 'nproc']) { should eq('1024:2408') }
end
describe json('/etc/docker/daemon.json') do
its(['default-ulimits', 'nofile']) { should eq('100:200') }
end
end | 60.571429 | 327 | 0.751179 |
d5b4cae49227128ee34dcde26ad2cf77bacf135d | 326 | class FixFeedbackStringSearchCriteria < ActiveRecord::Migration[6.1]
def change
Feedback.find_each do |f|
next unless f.search_criteria.is_a?(String)
f.update_columns(search_criteria: JSON.parse(f.search_criteria))
rescue JSON::ParserError
f.update_columns(search_criteria: nil)
end
end
end
| 27.166667 | 70 | 0.742331 |
e225c42891bde45ad9d62bdae3f9e95b6d192c43 | 546 | require 'rails_helper'
RSpec.describe Point::Info, type: :model do
ginza = ::StationFacility::Info.find_by( same_as: "odpt.StationFacility:TokyoMetro.Ginza" )
kanda = ::StationFacility::Info.find_by( same_as: "odpt.StationFacility:TokyoMetro.Kanda" )
point_infos_in_ginza = ginza.point_infos
point_infos_in_kanda = kanda.point_infos
common_point_info_ids = ( point_infos_in_ginza.pluck( :id ) & point_infos_in_kanda.pluck( :id ) )
  it "has no point info shared between the Ginza and Kanda station facilities." do
expect( common_point_info_ids ).to be_blank
end
end
| 32.117647 | 99 | 0.760073 |
f8210265ef2afd296514cfb461fd08c21ee28be4 | 1,776 | module Fog
module Network
class OpenStack
class Real
def get_port(port_id)
request(
:expects => [200],
:method => 'GET',
:path => "ports/#{port_id}"
)
end
end
class Mock
def get_port(port_id)
response = Excon::Response.new
if data = self.data[:ports][port_id]
response.status = 200
response.body = {
'port' => {
'id' => '5c81d975-5fea-4674-9c1f-b8aa10bf9a79',
'name' => 'port_1',
'network_id' => 'e624a36d-762b-481f-9b50-4154ceb78bbb',
'fixed_ips' => [
{
'ip_address' => '10.2.2.2',
'subnet_id' => '2e4ec6a4-0150-47f5-8523-e899ac03026e',
}
],
'mac_address' => 'fa:16:3e:62:91:7f',
'status' => 'ACTIVE',
'admin_state_up' => true,
'device_id' => 'dhcp724fc160-2b2e-597e-b9ed-7f65313cd73f-e624a36d-762b-481f-9b50-4154ceb78bbb',
'device_owner' => 'network:dhcp',
'tenant_id' => 'f8b26a6032bc47718a7702233ac708b9',
'security_groups' => ['3ddde803-e550-4737-b5de-0862401dc834'],
'allowed_address_pairs' => [
'ip_address' => '10.1.1.1',
'mac_address' => 'fa:16:3e:3d:2a:cc'
]
}
}
response
else
raise Fog::Network::OpenStack::NotFound
end
end
end
end
end
end
| 34.153846 | 123 | 0.413288 |
f89b91eae2ca1709fc8d83f4dbe70a523b565cff | 550 | require 'spec_helper'
module Opbeat
RSpec.describe Filter do
let(:config) { Configuration.new filter_parameters: [/password/, 'passwd'] }
subject do
Filter.new config
end
describe "#apply" do
it "filters a string" do
filtered = subject.apply "password=SECRET&foo=bar"
expect(filtered).to eq 'password=[FILTERED]&foo=bar'
end
it "filters a hash" do
filtered = subject.apply({ passwd: 'SECRET' })
expect(filtered).to eq({ passwd: '[FILTERED]' })
end
end
end
end
| 22.916667 | 80 | 0.621818 |
01bed77fcdfe3420d66c989218c0060a84a997ed | 1,385 | # frozen_string_literal: true
require_relative '../lib/aspose-email-cloud'
require 'securerandom'
RSpec.configure do |config|
config.expect_with :rspec do |expectations|
expectations.include_chain_clauses_in_custom_matcher_descriptions = true
end
config.mock_with :rspec do |mocks|
mocks.verify_partial_doubles = true
end
config.shared_context_metadata_behavior = :apply_to_host_groups
config.formatter = :documentation
end
include AsposeEmailCloud
RSpec.shared_context 'spec base', shared_context: :metadata do
before(:all) do
api_base_url = ENV['apiBaseUrl']
@api = EmailCloud.new(ENV['clientSecret'], ENV['clientId'], api_base_url)
auth_url = ENV['authUrl']
if auth_url
@api.api_invoker.api_client.config.scheme = 'http' if api_base_url.include? 'http:'
@api.api_invoker.api_client.config.auth_url = auth_url
end
@api.api_invoker.api_client.config.logger.level = 'warn'
@folder = SecureRandom.uuid.to_s
@storage = 'First Storage'
@api.cloud_storage.folder.create_folder(
CreateFolderRequest.new(path: @folder, storage_name: @storage))
end
after(:all) do
@api.cloud_storage.folder.delete_folder(
DeleteFolderRequest.new(path: @folder, storage_name: @storage, recursive: true))
end
def storage_folder
StorageFolderLocation.new(storage: @storage, folder_path: @folder)
end
end
| 30.108696 | 89 | 0.745848 |
e919ac761de333f03cd90d45403db4bfa58b819c | 966 | require 'spec_helper'
describe MetaTags::ViewHelper, 'displaying Open Search meta tags' do
subject { ActionView::Base.new }
it 'should display meta tags specified with :open_search' do
subject.set_meta_tags(open_search: {
title: 'Open Search Title',
href: '/open_search_path.xml'
})
subject.display_meta_tags(site: 'someSite').tap do |meta|
expect(meta).to have_tag('link', with: {
href: '/open_search_path.xml',
rel: 'search',
title: 'Open Search Title',
type: 'application/opensearchdescription+xml',
})
end
end
it 'should not display meta tags without content' do
subject.set_meta_tags(open_search: {
title: '',
href: '',
})
subject.display_meta_tags(site: 'someSite').tap do |meta|
expect(meta).to_not have_tag('link', with: {
rel: 'search',
type: 'application/opensearchdescription+xml',
})
end
end
end
| 28.411765 | 68 | 0.628364 |
089cb32f5a26a04ed2fc47a9bdb647c14b7b5486 | 6,017 | #
# Cookbook Name:: L7-mongo
# Provider:: db
#
# Copyright 2016, Gabor Szelcsanyi <[email protected]>
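#
# Hypothetical usage from a recipe (the resource name below assumes the usual
# cookbook_resource naming convention; attribute values are examples only):
#
#   l7_mongo_db 'main' do
#     port 27017
#     replSet 'rs0'
#     action :create
#   end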
def whyrun_supported?
true
end
action :remove do
service new_resource.name do
action [:stop, :disable]
end
directory "#{new_resource.home}/mongodb-#{new_resource.name}" do
action :delete
recursive true
end
file "/etc/init.d/mongodb-#{new_resource.name}" do
action :delete
end
file "/etc/logrotate.d/mongodb-#{new_resource.name}-logs" do
action :delete
end
cron_d "mongodb-#{new_resource.name}-monitoring" do
action :delete
end
file "/tmp/mongodb-monitoring-status-#{new_resource.port}" do
action :delete
end
end
action :create do
Chef::Log.info("MongoDB binary: #{new_resource.name}")
base = "#{new_resource.home}/mongodb-#{new_resource.name}"
group new_resource.group do
action :create
system true
end
user new_resource.user do
gid new_resource.group
shell '/bin/false'
home '/tmp'
system true
action :create
only_if do
::File.readlines('/etc/passwd').grep(/^mongodb/).size <= 0
end
end
directory base do
owner 'root'
group 'root'
mode '0755'
action :create
recursive true
end
%w[etc data log var tools].each do |dirname|
directory "#{base}/#{dirname}" do
owner new_resource.user
group new_resource.group
mode '0750'
action :create
recursive false
end
end
t = template "#{base}/etc/mongodb.conf" do
source 'etc/mongodb.conf.erb'
cookbook 'L7-mongo'
owner 'root'
group 'root'
mode '0644'
variables(
name: new_resource.name,
port: new_resource.port,
bind_ip: new_resource.bind_ip,
socket: base + '/var',
pidfile: base + '/var/mongodb.pid',
log: base + '/log/mongodb.log',
datadir: base + '/data',
replSet: new_resource.replSet,
notablescan: new_resource.notablescan,
smallfiles: new_resource.smallfiles,
journal: new_resource.journal,
rest: new_resource.rest,
httpinterface: new_resource.httpinterface,
auth: new_resource.auth
)
end
new_resource.updated_by_last_action(t.updated_by_last_action?)
if new_resource.rest
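    # Poll the mongod HTTP status endpoint (service port + 1000) once a minute
    # and atomically refresh /tmp/mongodb-monitoring-status-<port>; the temp
    # file is removed instead when the endpoint does not respond.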
cron_d "mongodb-#{new_resource.name}-monitoring" do
hour '*'
minute '*'
day '*'
month '*'
weekday '*'
command "if timeout 3 /usr/bin/wget --timeout=15 --tries=2 --quiet \
-O /tmp/mongodb-monitoring-status-#{new_resource.port}.tmp \
http://127.0.0.1:#{new_resource.port.to_i + 1000}/_status &> /dev/null; \
then \
sleep 1; mv /tmp/mongodb-monitoring-status-#{new_resource.port}.tmp \
/tmp/mongodb-monitoring-status-#{new_resource.port}; \
else \
rm -f /tmp/mongodb-monitoring-status-#{new_resource.port}.tmp; fi"
user 'root'
shell '/bin/bash'
end
else
cron_d "mongodb-#{new_resource.name}-monitoring" do
action :delete
end
file "/tmp/mongodb-monitoring-status-#{new_resource.port}" do
action :delete
end
end
%w[wget numactl pigz jq].each do |pkg|
package pkg do
action :install
end
end
filename = ::File.basename(new_resource.url)
dirname = filename.sub(/(\.tar\.gz$|\.tgz$)/, '')
bash 'get_mongodb_binary' do
user 'root'
cwd base
code <<-EOH
wget --no-check-certificate #{new_resource.url}
tar -zxf #{filename}
EOH
not_if do
::File.exist?(base + '/' + filename)
end
end
link "#{base}/current" do
to "#{base}/#{dirname}"
link_type :symbolic
end
t = template "/etc/init.d/mongodb-#{new_resource.name}" do
source 'etc/init.d/mongodb-init.erb'
cookbook 'L7-mongo'
owner 'root'
group 'root'
mode '0755'
variables(
daemon: "#{base}/current/bin/mongod",
datadir: "#{base}/data",
config: "#{base}/etc/mongodb.conf",
name: "mongodb-#{new_resource.name}",
pid: "#{base}/var/mongodb.pid"
)
end
new_resource.updated_by_last_action(t.updated_by_last_action?)
service "mongodb-#{new_resource.name}" do
action :enable
supports status: true, restart: true
end
t = template "/etc/logrotate.d/mongodb-#{new_resource.name}-logs" do
source 'etc/logrotate.d/mongodb-logs.erb'
cookbook 'L7-mongo'
owner 'root'
group 'root'
mode '0644'
variables(
cpath: "#{base}/log"
)
end
new_resource.updated_by_last_action(t.updated_by_last_action?)
t = template "#{base}/tools/backup_mongodb.sh" do
source 'tools/backup_mongodb.sh.erb'
cookbook 'L7-mongo'
owner 'root'
group 'root'
mode '0755'
variables(base: base,
name: new_resource.name,
port: new_resource.port,
backup_user: new_resource.backup_user,
backup_host: new_resource.backup_host,
backup_path: new_resource.backup_path,
backup_port: new_resource.backup_port)
end
new_resource.updated_by_last_action(t.updated_by_last_action?)
template base + '/tools/backup_rsa' do
source 'tools/backup_rsa.erb'
cookbook 'L7-mongo'
mode '0600'
owner 'root'
group 'root'
variables(privkey: new_resource.backup_privkey)
end
template base + '/tools/backup_rsa.pub' do
source 'tools/backup_rsa.pub.erb'
cookbook 'L7-mongo'
mode '0644'
owner 'root'
group 'root'
variables(pubkey: new_resource.backup_pubkey)
end
if new_resource.backup
cron_d "#{new_resource.name}-backup-mongodb" do
hour new_resource.backup_hour
minute new_resource.backup_minute
day '*'
month '*'
weekday '*'
command "#{base}/tools/backup_mongodb.sh >> \
#{base}/log/backup-mongodb-#{new_resource.name}.log 2>&1"
user 'root'
shell '/bin/bash'
end
else
cron_d "#{new_resource.name}-backup-mongodb" do
action :delete
end
end
if new_resource.default_instance
link '/usr/bin/mongo' do
to "#{base}/current/bin/mongo"
link_type :symbolic
end
end
end
| 24.262097 | 74 | 0.646335 |
6a4ec8f75c5b626b5a6405b5099b2398dee5d309 | 459 | cask 'eclipse-installer' do
version '4.7.0,oxygen:R'
sha256 '610b28ad30fc9ba044c87cca87ef66abdbe938d3ea50d112a81e36f953a72c0e'
url "https://eclipse.org/downloads/download.php?file=/oomph/epp/#{version.after_comma.before_colon}/#{version.after_colon}/eclipse-inst-mac64.tar.gz&r=1"
name 'Eclipse Installer'
homepage 'https://eclipse.org/'
depends_on macos: '>= :leopard'
app 'Eclipse Installer.app'
caveats do
depends_on_java
end
end
| 27 | 155 | 0.755991 |
bfe761e85a99998e9dfe20948ff50df724bf4928 | 1,899 | describe Spaceship::Portal::Persons do
before { Spaceship.login }
let(:client) { Spaceship::Persons.client }
it "should factor a new person object" do
joined = "2016-06-20T06:30:26Z"
attrs = {
"personId" => "1234",
"firstName" => "Helmut",
"lastName" => "Januschka",
"email" => "[email protected]",
"developerStatus" => "active",
"dateJoined" => joined,
"teamMemberId" => "1234"
}
person = Spaceship::Portal::Person.factory(attrs)
expect(person.email_address).to eq("[email protected]")
expect(person.joined).to eq(Time.parse(joined))
end
it "should be OK if person date format changes to timestamp" do
joined = 1_501_106_986_000
attrs = {
"personId" => "1234",
"firstName" => "Helmut",
"lastName" => "Januschka",
"email" => "[email protected]",
"developerStatus" => "active",
"dateJoined" => joined,
"teamMemberId" => "1234"
}
person = Spaceship::Portal::Person.factory(attrs)
expect(person.joined).to eq(joined)
end
it "should be OK if person date format is unparseable" do
joined = "This is clearly not a timestamp"
attrs = {
"personId" => "1234",
"firstName" => "Helmut",
"lastName" => "Januschka",
"email" => "[email protected]",
"developerStatus" => "active",
"dateJoined" => joined,
"teamMemberId" => "1234"
}
person = Spaceship::Portal::Person.factory(attrs)
expect(person.joined).to eq(joined)
end
it "should remove a member" do
expect(client).to receive(:team_remove_member!).with("5M8TWKRZ3J")
person = Spaceship::Portal::Persons.find("[email protected]")
person.remove!
end
it "should change role" do
person = Spaceship::Portal::Persons.find("[email protected]")
expect { person.change_role("member") }.to_not(raise_error)
end
end
| 31.131148 | 70 | 0.627172 |
186681d3cd3b4524ca385dd78ff8c71ef19092ce | 5,334 | # require 'geo_ruby/simple_features/point'
# require 'geo_ruby/simple_features/line_string'
# require 'geo_ruby/simple_features/linear_ring'
# require 'geo_ruby/simple_features/polygon'
# require 'geo_ruby/simple_features/multi_point'
# require 'geo_ruby/simple_features/multi_line_string'
# require 'geo_ruby/simple_features/multi_polygon'
# require 'geo_ruby/simple_features/geometry_collection'
# require 'geo_ruby/simple_features/envelope'
module GeoRuby
#Raised when an error in the GeoRSS string is detected
class GeorssFormatError < StandardError
end
#Contains tags possibly found on GeoRss Simple geometries
class GeorssTags < Struct.new(:featuretypetag,:relationshiptag,:elev,:floor,:radius)
end
#Parses GeoRSS strings
#You can also use directly the static method Geometry.from_georss
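  #
  #Illustrative example (GeoRSS Simple input; latitude comes first):
  #  parser = GeorssParser.new
  #  parser.parse("<georss:point>45.256 -71.92</georss:point>")
  #  parser.geometry #=> a Point with x (lon) -71.92 and y (lat) 45.256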
class GeorssParser
attr_reader :georss_tags, :geometry
    #Parses the GeoRSS geometry passed as argument and exposes the result
    #through the geometry and georss_tags readers. The parser assumes the input
    #is in W3CGeo, GML or Simple GeoRSS format and raises GeorssFormatError otherwise.
def parse(georss,with_tags = false)
@geometry = nil
@georss_tags = GeorssTags.new
parse_geometry(georss,with_tags)
end
private
def parse_geometry(georss,with_tags)
georss.strip!
#check for W3CGeo first
if georss =~ /<[^:>]*:lat\s*>([^<]*)</
#if valid, it is W3CGeo
lat = $1.to_f
if georss =~ /<[^:>]*:long\s*>([^<]*)</
lon = $1.to_f
@geometry = Point.from_x_y(lon,lat)
else
raise GeorssFormatError.new("Bad W3CGeo GeoRSS format")
end
elsif georss =~ /^<\s*[^:>]*:where\s*>/
#GML format found
gml = $'.strip
if gml =~ /^<\s*[^:>]*:Point\s*>/
#gml point
if gml =~ /<\s*[^:>]*:pos\s*>([^<]*)/
point = $1.split(" ")
#lat comes first
@geometry = Point.from_x_y(point[1].to_f,point[0].to_f)
else
raise GeorssFormatError.new("Bad GML GeoRSS format: Malformed Point")
end
elsif gml =~ /^<\s*[^:>]*:LineString\s*>/
if gml =~ /<\s*[^:>]*:posList\s*>([^<]*)/
xy = $1.split(" ")
@geometry = LineString.new
0.upto(xy.size/2 - 1) { |index| @geometry << Point.from_x_y(xy[index*2 + 1].to_f,xy[index*2].to_f)}
else
raise GeorssFormatError.new("Bad GML GeoRSS format: Malformed LineString")
end
elsif gml =~ /^<\s*[^:>]*:Polygon\s*>/
if gml =~ /<\s*[^:>]*:posList\s*>([^<]*)/
xy = $1.split(" ")
@geometry = Polygon.new
linear_ring = LinearRing.new
@geometry << linear_ring
xy = $1.split(" ")
0.upto(xy.size/2 - 1) { |index| linear_ring << Point.from_x_y(xy[index*2 + 1].to_f,xy[index*2].to_f)}
else
raise GeorssFormatError.new("Bad GML GeoRSS format: Malformed Polygon")
end
elsif gml =~ /^<\s*[^:>]*:Envelope\s*>/
if gml =~ /<\s*[^:>]*:lowerCorner\s*>([^<]*)</
lc = $1.split(" ").collect { |x| x.to_f}.reverse
if gml =~ /<\s*[^:>]*:upperCorner\s*>([^<]*)</
uc = $1.split(" ").collect { |x| x.to_f}.reverse
@geometry = Envelope.from_coordinates([lc,uc])
else
raise GeorssFormatError.new("Bad GML GeoRSS format: Malformed Envelope")
end
else
raise GeorssFormatError.new("Bad GML GeoRSS format: Malformed Envelope")
end
else
raise GeorssFormatError.new("Bad GML GeoRSS format: Unknown geometry type")
end
else
#must be simple format
if georss =~ /^<\s*[^>:]*:point([^>]*)>(.*)</m
tags = $1
point = $2.gsub(","," ").split(" ")
@geometry = Point.from_x_y(point[1].to_f,point[0].to_f)
elsif georss =~ /^<\s*[^>:]*:line([^>]*)>(.*)</m
tags = $1
@geometry = LineString.new
xy = $2.gsub(","," ").split(" ")
0.upto(xy.size/2 - 1) { |index| @geometry << Point.from_x_y(xy[index*2 + 1].to_f,xy[index*2].to_f)}
elsif georss =~ /^<\s*[^>:]*:polygon([^>]*)>(.*)</m
tags = $1
@geometry = Polygon.new
linear_ring = LinearRing.new
@geometry << linear_ring
xy = $2.gsub(","," ").split(" ")
0.upto(xy.size/2 - 1) { |index| linear_ring << Point.from_x_y(xy[index*2 + 1].to_f,xy[index*2].to_f)}
elsif georss =~ /^<\s*[^>:]*:box([^>]*)>(.*)</m
tags = $1
corners = []
xy = $2.gsub(","," ").split(" ")
0.upto(xy.size/2 - 1) {|index| corners << Point.from_x_y(xy[index*2 + 1].to_f,xy[index*2].to_f)}
@geometry = Envelope.from_points(corners)
else
raise GeorssFormatError.new("Bad Simple GeoRSS format: Unknown geometry type")
end
#geometry found: parse tags
return unless with_tags
@georss_tags.featuretypetag = $1 if tags =~ /featuretypetag=['"]([^"']*)['"]/
@georss_tags.relationshiptag = $1 if tags =~ /relationshiptag=['"]([^'"]*)['"]/
@georss_tags.elev = $1.to_f if tags =~ /elev=['"]([^'"]*)['"]/
@georss_tags.floor = $1.to_i if tags =~ /floor=['"]([^'"]*)['"]/
@georss_tags.radius = $1.to_f if tags =~ /radius=['"]([^'"]*)['"]/
end
end
end
end
| 39.80597 | 113 | 0.546494 |
21d929077adb1c3653b1e4e1c2629cbfd54e2aee | 14,536 | =begin
#The Plaid API
#The Plaid REST API. Please see https://plaid.com/docs/api for more details.
The version of the OpenAPI document: 2020-09-14_1.31.1
Generated by: https://openapi-generator.tech
OpenAPI Generator version: 5.1.0
=end
require 'date'
require 'time'
module Plaid
# Defines the request schema for `/bank_transfer/event/list`
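  #
  # Illustrative construction (attribute names follow the attribute map below;
  # the credential values are placeholders):
  #
  #   Plaid::BankTransferEventListRequest.new(
  #     client_id: 'your-client-id',
  #     secret: 'your-secret',
  #     count: 25,
  #     offset: 0
  #   )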
class BankTransferEventListRequest
# Your Plaid API `client_id`. The `client_id` is required and may be provided either in the `PLAID-CLIENT-ID` header or as part of a request body.
attr_accessor :client_id
# Your Plaid API `secret`. The `secret` is required and may be provided either in the `PLAID-SECRET` header or as part of a request body.
attr_accessor :secret
# The start datetime of bank transfers to list. This should be in RFC 3339 format (i.e. `2019-12-06T22:35:49Z`)
attr_accessor :start_date
# The end datetime of bank transfers to list. This should be in RFC 3339 format (i.e. `2019-12-06T22:35:49Z`)
attr_accessor :end_date
# Plaid’s unique identifier for a bank transfer.
attr_accessor :bank_transfer_id
# The account ID to get events for all transactions to/from an account.
attr_accessor :account_id
# The type of bank transfer. This will be either `debit` or `credit`. A `debit` indicates a transfer of money into your origination account; a `credit` indicates a transfer of money out of your origination account.
attr_accessor :bank_transfer_type
# Filter events by event type.
attr_accessor :event_types
# The maximum number of bank transfer events to return. If the number of events matching the above parameters is greater than `count`, the most recent events will be returned.
attr_accessor :count
# The offset into the list of bank transfer events. When `count`=25 and `offset`=0, the first 25 events will be returned. When `count`=25 and `offset`=25, the next 25 bank transfer events will be returned.
attr_accessor :offset
# The origination account ID to get events for transfers from a specific origination account.
attr_accessor :origination_account_id
    # Indicates the direction of the transfer: `outbound`: for API-initiated transfers; `inbound`: for payments received by the FBO account.
attr_accessor :direction
class EnumAttributeValidator
attr_reader :datatype
attr_reader :allowable_values
def initialize(datatype, allowable_values)
@allowable_values = allowable_values.map do |value|
case datatype.to_s
when /Integer/i
value.to_i
when /Float/i
value.to_f
else
value
end
end
end
def valid?(value)
!value || allowable_values.include?(value)
end
end
# Attribute mapping from ruby-style variable name to JSON key.
def self.attribute_map
{
:'client_id' => :'client_id',
:'secret' => :'secret',
:'start_date' => :'start_date',
:'end_date' => :'end_date',
:'bank_transfer_id' => :'bank_transfer_id',
:'account_id' => :'account_id',
:'bank_transfer_type' => :'bank_transfer_type',
:'event_types' => :'event_types',
:'count' => :'count',
:'offset' => :'offset',
:'origination_account_id' => :'origination_account_id',
:'direction' => :'direction'
}
end
# Returns all the JSON keys this model knows about
def self.acceptable_attributes
attribute_map.values
end
# Attribute type mapping.
def self.openapi_types
{
:'client_id' => :'String',
:'secret' => :'String',
:'start_date' => :'Time',
:'end_date' => :'Time',
:'bank_transfer_id' => :'String',
:'account_id' => :'String',
:'bank_transfer_type' => :'String',
:'event_types' => :'Array<BankTransferEventType>',
:'count' => :'Integer',
:'offset' => :'Integer',
:'origination_account_id' => :'String',
:'direction' => :'String'
}
end
# List of attributes with nullable: true
def self.openapi_nullable
Set.new([
:'start_date',
:'end_date',
:'bank_transfer_id',
:'account_id',
:'bank_transfer_type',
:'count',
:'offset',
:'origination_account_id',
:'direction'
])
end
# Initializes the object
# @param [Hash] attributes Model attributes in the form of hash
def initialize(attributes = {})
if (!attributes.is_a?(Hash))
fail ArgumentError, "The input argument (attributes) must be a hash in `Plaid::BankTransferEventListRequest` initialize method"
end
# check to see if the attribute exists and convert string to symbol for hash key
attributes = attributes.each_with_object({}) { |(k, v), h|
if (!self.class.attribute_map.key?(k.to_sym))
fail ArgumentError, "`#{k}` is not a valid attribute in `Plaid::BankTransferEventListRequest`. Please check the name to make sure it's valid. List of attributes: " + self.class.attribute_map.keys.inspect
end
h[k.to_sym] = v
}
if attributes.key?(:'client_id')
self.client_id = attributes[:'client_id']
end
if attributes.key?(:'secret')
self.secret = attributes[:'secret']
end
if attributes.key?(:'start_date')
self.start_date = attributes[:'start_date']
end
if attributes.key?(:'end_date')
self.end_date = attributes[:'end_date']
end
if attributes.key?(:'bank_transfer_id')
self.bank_transfer_id = attributes[:'bank_transfer_id']
end
if attributes.key?(:'account_id')
self.account_id = attributes[:'account_id']
end
if attributes.key?(:'bank_transfer_type')
self.bank_transfer_type = attributes[:'bank_transfer_type']
end
if attributes.key?(:'event_types')
if (value = attributes[:'event_types']).is_a?(Array)
self.event_types = value
end
end
if attributes.key?(:'count')
self.count = attributes[:'count']
else
self.count = 25
end
if attributes.key?(:'offset')
self.offset = attributes[:'offset']
else
self.offset = 0
end
if attributes.key?(:'origination_account_id')
self.origination_account_id = attributes[:'origination_account_id']
end
if attributes.key?(:'direction')
self.direction = attributes[:'direction']
end
end
# Show invalid properties with the reasons. Usually used together with valid?
# @return Array for valid properties with the reasons
def list_invalid_properties
invalid_properties = Array.new
if [email protected]? && @count > 25
invalid_properties.push('invalid value for "count", must be smaller than or equal to 25.')
end
if [email protected]? && @count < 1
invalid_properties.push('invalid value for "count", must be greater than or equal to 1.')
end
if [email protected]? && @offset < 0
invalid_properties.push('invalid value for "offset", must be greater than or equal to 0.')
end
invalid_properties
end
# Check to see if the all the properties in the model are valid
# @return true if the model is valid
def valid?
bank_transfer_type_validator = EnumAttributeValidator.new('String', ["debit", "credit", "null"])
return false unless bank_transfer_type_validator.valid?(@bank_transfer_type)
return false if [email protected]? && @count > 25
return false if [email protected]? && @count < 1
return false if [email protected]? && @offset < 0
direction_validator = EnumAttributeValidator.new('String', ["inbound", "outbound", "null"])
return false unless direction_validator.valid?(@direction)
true
end
# Custom attribute writer method checking allowed values (enum).
# @param [Object] bank_transfer_type Object to be assigned
def bank_transfer_type=(bank_transfer_type)
validator = EnumAttributeValidator.new('String', ["debit", "credit", "null"])
unless validator.valid?(bank_transfer_type)
fail ArgumentError, "invalid value for \"bank_transfer_type\", must be one of #{validator.allowable_values}."
end
@bank_transfer_type = bank_transfer_type
end
# Custom attribute writer method with validation
# @param [Object] count Value to be assigned
def count=(count)
if !count.nil? && count > 25
fail ArgumentError, 'invalid value for "count", must be smaller than or equal to 25.'
end
if !count.nil? && count < 1
fail ArgumentError, 'invalid value for "count", must be greater than or equal to 1.'
end
@count = count
end
# Custom attribute writer method with validation
# @param [Object] offset Value to be assigned
def offset=(offset)
if !offset.nil? && offset < 0
fail ArgumentError, 'invalid value for "offset", must be greater than or equal to 0.'
end
@offset = offset
end
# Custom attribute writer method checking allowed values (enum).
# @param [Object] direction Object to be assigned
def direction=(direction)
validator = EnumAttributeValidator.new('String', ["inbound", "outbound", "null"])
unless validator.valid?(direction)
fail ArgumentError, "invalid value for \"direction\", must be one of #{validator.allowable_values}."
end
@direction = direction
end
# Checks equality by comparing each attribute.
# @param [Object] Object to be compared
def ==(o)
return true if self.equal?(o)
self.class == o.class &&
client_id == o.client_id &&
secret == o.secret &&
start_date == o.start_date &&
end_date == o.end_date &&
bank_transfer_id == o.bank_transfer_id &&
account_id == o.account_id &&
bank_transfer_type == o.bank_transfer_type &&
event_types == o.event_types &&
count == o.count &&
offset == o.offset &&
origination_account_id == o.origination_account_id &&
direction == o.direction
end
# @see the `==` method
# @param [Object] Object to be compared
def eql?(o)
self == o
end
# Calculates hash code according to all attributes.
# @return [Integer] Hash code
def hash
[client_id, secret, start_date, end_date, bank_transfer_id, account_id, bank_transfer_type, event_types, count, offset, origination_account_id, direction].hash
end
# Builds the object from hash
# @param [Hash] attributes Model attributes in the form of hash
# @return [Object] Returns the model itself
def self.build_from_hash(attributes)
new.build_from_hash(attributes)
end
# Builds the object from hash
# @param [Hash] attributes Model attributes in the form of hash
# @return [Object] Returns the model itself
def build_from_hash(attributes)
return nil unless attributes.is_a?(Hash)
self.class.openapi_types.each_pair do |key, type|
if attributes[self.class.attribute_map[key]].nil? && self.class.openapi_nullable.include?(key)
self.send("#{key}=", nil)
elsif type =~ /\AArray<(.*)>/i
# check to ensure the input is an array given that the attribute
# is documented as an array but the input is not
if attributes[self.class.attribute_map[key]].is_a?(Array)
self.send("#{key}=", attributes[self.class.attribute_map[key]].map { |v| _deserialize($1, v) })
end
elsif !attributes[self.class.attribute_map[key]].nil?
self.send("#{key}=", _deserialize(type, attributes[self.class.attribute_map[key]]))
end
end
self
end
# Deserializes the data based on type
# @param string type Data type
# @param string value Value to be deserialized
# @return [Object] Deserialized data
def _deserialize(type, value)
case type.to_sym
when :Time
Time.parse(value)
when :Date
Date.parse(value)
when :String
value.to_s
when :Integer
value.to_i
when :Float
value.to_f
when :Boolean
if value.to_s =~ /\A(true|t|yes|y|1)\z/i
true
else
false
end
when :Object
# generic object (usually a Hash), return directly
value
when /\AArray<(?<inner_type>.+)>\z/
inner_type = Regexp.last_match[:inner_type]
value.map { |v| _deserialize(inner_type, v) }
when /\AHash<(?<k_type>.+?), (?<v_type>.+)>\z/
k_type = Regexp.last_match[:k_type]
v_type = Regexp.last_match[:v_type]
{}.tap do |hash|
value.each do |k, v|
hash[_deserialize(k_type, k)] = _deserialize(v_type, v)
end
end
else # model
# models (e.g. Pet) or oneOf
klass = Plaid.const_get(type)
klass.respond_to?(:openapi_one_of) ? klass.build(value) : klass.build_from_hash(value)
end
end
# Returns the string representation of the object
# @return [String] String presentation of the object
def to_s
to_hash.to_s
end
# to_body is an alias to to_hash (backward compatibility)
# @return [Hash] Returns the object in the form of hash
def to_body
to_hash
end
# Returns the object in the form of hash
# @return [Hash] Returns the object in the form of hash
def to_hash
hash = {}
self.class.attribute_map.each_pair do |attr, param|
value = self.send(attr)
if value.nil?
is_nullable = self.class.openapi_nullable.include?(attr)
next if !is_nullable || (is_nullable && !instance_variable_defined?(:"@#{attr}"))
end
hash[param] = _to_hash(value)
end
hash
end
# Outputs non-array value in the form of hash
# For object, use to_hash. Otherwise, just return the value
# @param [Object] value Any valid value
# @return [Hash] Returns the value in the form of hash
def _to_hash(value)
if value.is_a?(Array)
value.compact.map { |v| _to_hash(v) }
elsif value.is_a?(Hash)
{}.tap do |hash|
value.each { |k, v| hash[k] = _to_hash(v) }
end
elsif value.respond_to? :to_hash
value.to_hash
else
value
end
end
end
end
| 33.416092 | 219 | 0.633255 |
ede8c246d942661f5933bb02c8362021479c5fce | 307 | class Article < ApplicationRecord
belongs_to :user
has_many :article_categories
has_many :categories, through: :article_categories
validates :title, presence: true, length: { minimum: 5 }
validates :description, presence: true, length: { minimum: 100 }
validates :user_id, presence: true
end
| 30.7 | 66 | 0.752443 |
6a2da372709ff8ecab5934399fcbed4e1c34695d | 922 | # frozen_string_literal: true
require_relative '../dictionary_key'
require_relative '../services/dictionary_metadata_service'
require_relative 'dictionary_cache_validatable'
require_relative 'dictionary_metadata_validatable'
module LittleWeasel
module Modules
# This module defines methods and attributes to consume the dictionary
# metadata service.
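    # Includers are expected to provide dictionary_key, dictionary_cache and
    # dictionary_metadata (see the readers/writers below) before calling
    # #dictionary_metadata_service.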
module DictionaryMetadataServicable
include DictionaryKeyable
include DictionaryCacheValidatable
include DictionaryMetadataValidatable
attr_reader :dictionary_cache, :dictionary_key, :dictionary_metadata
def dictionary_metadata_service
Services::DictionaryMetadataService.new(dictionary_key: dictionary_key, dictionary_cache: dictionary_cache,
dictionary_metadata: dictionary_metadata)
end
private
attr_writer :dictionary_cache, :dictionary_key, :dictionary_metadata
end
end
end
| 30.733333 | 115 | 0.793926 |
087a7446be6d46972931c8169a23c056c9228fb4 | 149 | class AddPublicFlgagToAccount < ActiveRecord::Migration[5.2]
def change
add_column :accounts, :public_site, :boolean, default: false
end
end
| 24.833333 | 64 | 0.765101 |
e9ba32b3306ac5e4a59d14ecae41923fc07130dd | 1,281 | =begin
#Selling Partner API for Catalog Items
#The Selling Partner API for Catalog Items helps you programmatically retrieve item details for items in the catalog.
OpenAPI spec version: v0
Generated by: https://github.com/swagger-api/swagger-codegen.git
Swagger Codegen version: 3.0.24
=end
require 'spec_helper'
require 'json'
require 'date'
# Unit tests for AmzSpApi::CatalogItemsApiModel::CreatorType
# Automatically generated by swagger-codegen (github.com/swagger-api/swagger-codegen)
# Please update as you see appropriate
describe 'CreatorType' do
before do
# run before each test
@instance = AmzSpApi::CatalogItemsApiModel::CreatorType.new
end
after do
# run after each test
end
describe 'test an instance of CreatorType' do
it 'should create an instance of CreatorType' do
expect(@instance).to be_instance_of(AmzSpApi::CatalogItemsApiModel::CreatorType)
end
end
describe 'test attribute "value"' do
it 'should work' do
# assertion here. ref: https://www.relishapp.com/rspec/rspec-expectations/docs/built-in-matchers
end
end
describe 'test attribute "role"' do
it 'should work' do
# assertion here. ref: https://www.relishapp.com/rspec/rspec-expectations/docs/built-in-matchers
end
end
end
| 27.255319 | 117 | 0.746292 |
38b38f4be5d41f33e42feeac36af45b603b96f58 | 3,409 | describe "NRSER.transform" do
subject { NRSER.transform tree, source }
describe_section "Simple Examples" do
# ========================================================================
describe "value swap in {x: 'ex', y: 'why?'}" do
let( :tree ) {
{
x: [ :y ].to_proc,
y: [ :x ].to_proc,
}
} # let :tree
let( :source ) {
{
x: 'ex',
y: 'why?'
}
} # let :source
it { is_expected.to eq x: 'why?', y: 'ex' }
end # value swap in {x: 'ex', y: 'why?'}
describe "transform in key" do
let :tree do
{
users: {
{ id: [ :id ].to_proc } => {
name: [ :name ].to_proc,
}
}
}
end
let :source do
{
id: 123,
name: "Mr. Cat",
}
end
it do
is_expected.to eq \
users: {
{ id: 123 } => {
name: "Mr. Cat",
}
}
end
end # transform in key
describe "arrays in tree" do
let :tree do
{
list: [
{ name: [ :name ].to_proc },
{ age: [ :age ].to_proc },
]
}
end
let :source do
{
name: 'Mr. Cat',
age: 2,
}
end
it do
is_expected.to eq \
list: [
{ name: 'Mr. Cat' },
{ age: 2 },
]
end
end # arrays in tree
end # section simple examples
# ************************************************************************
describe_section "Real-World Examples" do
# ========================================================================
describe "Address" do
let :contact do
OpenStruct.new id: 123
end
let :address do
OpenStruct.new \
id: 987,
parent: contact,
address: "东城区",
street2: "民安小区",
city: "北京市",
state: "北京",
zip: "100010"
end
let :source do
address
end
let :tree do
{
users: {
{ contact_id: [ :parent, :id ].to_chainer } => {
addresses: {
{
address_id: [ :id ].to_sender } => {
district: [:address ].to_proc,
line_1: [:street2 ].to_proc,
city: [:city ].to_proc,
province: [:state ].to_proc,
post_code: [:zip ].to_proc,
}
}
}
}
}
end
it do
is_expected.to eq \
users: {
{ contact_id: 123 } => {
addresses: {
{ address_id: 987 } => {
district: "东城区",
line_1: "民安小区",
city: "北京市",
province: "北京",
post_code: "100010",
}
}
}
}
end
end # Address
end # section Real-World Examples
# ************************************************************************
end # NRSER.transform
| 21.713376 | 76 | 0.315928 |
d5076ec283f0aad509e48f47b0134d4050be7a3f | 294 | require 'test_queue'
require 'test_queue/runner/minitest'
class SleepyTestRunner < TestQueue::Runner::MiniTest
def after_fork(num)
if ENV['SLEEP_AS_RELAY'] && relay?
sleep 5
elsif ENV['SLEEP_AS_MASTER'] && !relay?
sleep 5
end
end
end
SleepyTestRunner.new.execute
| 19.6 | 52 | 0.704082 |
26bf529738fda38e264640cc1b800318752f4a7d | 2,808 | require 'gli'
require 'yaml'
require 'colorize'
require 'meetup-cli/api'
require 'meetup-cli/version'
APP_NAME = File.basename $0, File.extname($0)
CONFIG_FILE = File.join(ENV['HOME'], ".#{APP_NAME}rc")
# Use the GLI command line parser to create this CLI app
# https://github.com/davetron5000/gli
#
include GLI::App
program_desc "Meetup command line interface"
version MCLI::VERSION
default_command :going
sort_help :manually
switch :color, :desc => 'Force colorized output', :negatable => false
pre do |global_options|
# Exit without a stack trace when terminating due to a broken pipe
Signal.trap "PIPE", "SYSTEM_DEFAULT" if Signal.list.include? "PIPE"
String.disable_colorization(true) unless STDOUT.isatty or global_options['color']
begin
$config = YAML.load_file(CONFIG_FILE)
throw if $config['api_key'].nil?
rescue
exit_now! <<-EOM
It looks like you are running #{APP_NAME} for the first time.
Obtain an API key from: https://secure.meetup.com/meetup_api/key/
And save the following in ~/.#{APP_NAME}rc:
api_key: <api-key>
EOM
end
true # Success
end
on_error do |exception|
puts exception.message
# Suppress GLI's built-in error handling
false
end
# Human friendly date/time in user's time zone (almost RFC 2822)
def date_str(date)
date.to_time.strftime('%a, %-d %b %Y %H:%M:%S %Z')
end
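# Prints the event name (flagging pizza/drinks/wine mentions with emoji) plus its URL, date, and venue.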
def print_event_details(event)
puts "#{event.name.light_blue}" +
(/pizza/i.match(event.description) ? " 🍕" : "") +
(/(beer|drinks)/i.match(event.description) ? " 🍺" : "") +
(/wine/i.match(event.description) ? " 🍷" : "")
puts " #{"URL:".magenta} #{event.event_url}"
puts " #{"Date:".magenta} #{date_str(event.time)}"
puts " #{"Where:".magenta} #{(event.venue.name.nil? ? "Not specified" : "#{event.venue.address_1}, #{event.venue.city}, #{event.venue.state} (#{event.venue.name.colorize(:green)})")}"
end
def query_events(attendance)
MCLI::send('get_' + attendance + '_events').each do |event|
print_event_details event
puts
end
end
desc "List upcoming meetups you are going to (default command)"
command :going do |c|
c.action do
query_events c.name.to_s
end
end
desc "List upcoming meetups you are not going to"
command :notgoing do |c|
c.action do
query_events c.name.to_s
end
end
desc "List upcoming meetups you RSVP'ed (yes or no) to"
command :upcoming do |c|
c.action do
query_events c.name.to_s
end
end
desc "List past meetups you went to"
command :went do |c|
c.action do
query_events c.name.to_s
end
end
desc "List past meetups you didn't go to"
command :didntgo do |c|
c.action do
query_events c.name.to_s
end
end
desc "List past meetups you RSVP'ed (yes or no) to"
command :past do |c|
c.action do
query_events c.name.to_s
end
end
exit run(ARGV) | 24.206897 | 186 | 0.691952 |
f7a50e4225af78ce4c11c3019f96a3fd03708d69 | 709 | require 'spec_helper'
describe Rounders::Stores::Memory do
let(:described_class) { Rounders::Stores::Memory }
let(:described_instance) { described_class.new(*args) }
let(:args) { [] }
describe '#initialize' do
subject { described_instance }
it { is_expected.to be_a described_class }
end
describe 'data' do
    it 'should return nil' do
expect(described_instance[:empty_data]).to eq nil
end
    it 'should set and get a value' do
described_instance[:data] = 'hogehoge'
expect(described_instance[:data]).to eq 'hogehoge'
end
end
end
| 25.321429 | 57 | 0.688293 |
f7efd8be045ea606d6fcd9a7f0a5b676ff924f4c | 861 | require "spec_helper"
describe Rrserver::Server do
let(:server) { Rrserver::Server.new(application, sockets) }
let(:application) { double(:application) }
let(:socket) { double(:socket) }
let(:sockets) { [socket] }
describe "#run" do
it "handles interrupt" do
expect(server).to receive(:monitor) { raise Interrupt.new }
expect(Rrserver.logger).to receive(:log).with("Interrupted")
server.run
end
end
describe "#monitor" do
it "selects then accepts and handles a connection" do
io = double(:io)
socket = double(:socket)
http = double(:http)
expect(Rrserver::HTTP).to receive(:new).with(socket, application) { http }
expect(http).to receive(:handle)
expect(IO).to receive(:select).with(sockets) { io }
expect(io).to receive(:accept) { socket }
expect(socket).to receive(:close)
server.monitor
end
end
end | 25.323529 | 77 | 0.684088 |
4a2d9e1325a96b835349ad4c9cb002b52de086dc | 5,607 | # Copyright (c) 2008-2013 Michael Dvorkin and contributors.
#
# Fat Free CRM is freely distributable under the terms of MIT license.
# See MIT-LICENSE file or http://www.opensource.org/licenses/mit-license.php
#------------------------------------------------------------------------------
class AccountsController < EntitiesController
before_filter :get_data_for_sidebar, :only => :index
# GET /accounts
#----------------------------------------------------------------------------
def index
@accounts = get_accounts(:page => params[:page], :per_page => params[:per_page])
respond_with @accounts do |format|
format.xls { render :layout => 'header' }
format.csv { render :csv => @accounts }
end
end
# GET /accounts/1
# AJAX /accounts/1
#----------------------------------------------------------------------------
def show
@stage = Setting.unroll(:opportunity_stage)
@comment = Comment.new
@timeline = timeline(@account)
respond_with(@account)
end
# GET /accounts/new
#----------------------------------------------------------------------------
def new
@account.attributes = {:user => current_user, :access => Setting.default_access, :assigned_to => nil}
if params[:related]
model, id = params[:related].split('_')
instance_variable_set("@#{model}", model.classify.constantize.find(id))
end
respond_with(@account)
end
# GET /accounts/1/edit AJAX
#----------------------------------------------------------------------------
def edit
if params[:previous].to_s =~ /(\d+)\z/
@previous = Account.my.find_by_id($1) || $1.to_i
end
respond_with(@account)
end
# POST /accounts
#----------------------------------------------------------------------------
def create
@comment_body = params[:comment_body]
respond_with(@account) do |format|
if @account.save
@account.add_comment_by_user(@comment_body, current_user)
# None: account can only be created from the Accounts index page, so we
# don't have to check whether we're on the index page.
@accounts = get_accounts
get_data_for_sidebar
end
end
end
# PUT /accounts/1
#----------------------------------------------------------------------------
def update
respond_with(@account) do |format|
# Must set access before user_ids, because user_ids= method depends on access value.
@account.access = params[:account][:access] if params[:account][:access]
get_data_for_sidebar if @account.update_attributes(params[:account])
end
end
# DELETE /accounts/1
#----------------------------------------------------------------------------
def destroy
@account.destroy
respond_with(@account) do |format|
format.html { respond_to_destroy(:html) }
format.js { respond_to_destroy(:ajax) }
end
end
# PUT /accounts/1/attach
#----------------------------------------------------------------------------
# Handled by EntitiesController :attach
# PUT /accounts/1/discard
#----------------------------------------------------------------------------
# Handled by EntitiesController :discard
# POST /accounts/auto_complete/query AJAX
#----------------------------------------------------------------------------
# Handled by ApplicationController :auto_complete
# POST /accounts/redraw AJAX
#----------------------------------------------------------------------------
def redraw
current_user.pref[:accounts_per_page] = params[:per_page] if params[:per_page]
current_user.pref[:accounts_sort_by] = Account::sort_by_map[params[:sort_by]] if params[:sort_by]
@accounts = get_accounts(:page => 1, :per_page => params[:per_page])
set_options # Refresh options
respond_with(@accounts) do |format|
format.js { render :index }
end
end
# POST /accounts/filter AJAX
#----------------------------------------------------------------------------
def filter
session[:accounts_filter] = params[:category]
@accounts = get_accounts(:page => 1, :per_page => params[:per_page])
respond_with(@accounts) do |format|
format.js { render :index }
end
end
private
#----------------------------------------------------------------------------
alias :get_accounts :get_list_of_records
#----------------------------------------------------------------------------
def respond_to_destroy(method)
if method == :ajax
@accounts = get_accounts
get_data_for_sidebar
if @accounts.empty?
@accounts = get_accounts(:page => current_page - 1) if current_page > 1
render :index and return
end
# At this point render default destroy.js
else # :html request
self.current_page = 1 # Reset current page to 1 to make sure it stays valid.
flash[:notice] = t(:msg_asset_deleted, @account.name)
redirect_to accounts_path
end
end
#----------------------------------------------------------------------------
def get_data_for_sidebar
@account_category_total = Hash[
Setting.account_category.map do |key|
[ key, Account.my.where(:category => key.to_s).count ]
end
]
categorized = @account_category_total.values.sum
@account_category_total[:all] = Account.my.count
@account_category_total[:other] = @account_category_total[:all] - categorized
end
end
| 35.264151 | 105 | 0.502229 |
7a93847ec8abd8f757d58ddf99733f6063a5e39e | 387 | require 'rails'
require 'sprockets/railtie'
require 'jquery/rails'
require 'gl-matrix'
require 'jax/core/shader_processor'
require 'jax/core/generators/all'
require 'jax/core/matchers'
module Jax
module Core
class Railtie < Rails::Engine
initializer 'jax.shaders' do |app|
app.assets.register_engine '.glsl', Jax::Core::ShaderProcessor
end
end
end
end
| 21.5 | 73 | 0.715762 |
393fb19628d25560dd5e5199bc50195f74895378 | 4,922 | require 'spec_helper'
describe 'puppet_enterprise::profile::controller' do
it { should contain_package("pe-client-tools") }
[
"/etc/puppetlabs/client-tools",
"/etc/puppetlabs/client-tools/ssl",
"/etc/puppetlabs/client-tools/ssl/certs"
].each do |path|
it { should contain_file(path)
.with_ensure('directory')
.with_owner('root')
.with_group('root')
.with_mode('0755')
.with_recurse(false) }
end
it { should contain_file("/etc/puppetlabs/client-tools/ssl/certs/ca.pem")
.with_ensure('present')
.with_owner('root')
.with_group('root')
.with_mode('0444')
.with_source("/etc/puppetlabs/puppet/ssl/certs/ca.pem") }
describe "puppet-code" do
describe "when manage_puppet_code is false" do
let(:params) { {"manage_puppet_code" => false} }
it { should_not contain_file("/etc/puppetlabs/client-tools/puppet-code.conf") }
end
describe "when manage_puppet_code is true" do
let(:params) { {"manage_puppet_code" => true} }
it { should contain_file("/etc/puppetlabs/client-tools/puppet-code.conf")
.with_ensure('present')
.with_owner('root')
.with_group('root')
.with_mode('0444') }
it "defaults the service URL to the CA server for puppet-code" do
raw = catalogue.resource('file', '/etc/puppetlabs/client-tools/puppet-code.conf')[:content]
json = JSON.parse(raw)
expect(json['service-url']).to eq("https://master.rspec:8170/code-manager")
end
end
end
describe "orchestrator" do
describe "when manage_orchestrator is false" do
let(:params) { {"manage_orchestrator" => false} }
it { should_not contain_file("/etc/puppetlabs/client-tools/orchestrator.conf") }
end
describe "when manage_orchestrator is true" do
let(:params) { {"manage_orchestrator" => true} }
it { should contain_file("/etc/puppetlabs/client-tools/orchestrator.conf")
.with_ensure('present')
.with_owner('root')
.with_group('root')
.with_mode('0444') }
it "defaults the service URL to the CA server for orchestrator" do
raw = catalogue.resource('file', '/etc/puppetlabs/client-tools/orchestrator.conf')[:content]
json = JSON.parse(raw)
expect(json['options']['service-url']).to eq("https://master.rspec:8143")
end
end
end
describe "puppet-access" do
it { should contain_file("/etc/puppetlabs/client-tools/puppet-access.conf")
.with_ensure('present')
.with_owner('root')
.with_group('root')
.with_mode('0444') }
it "defaults the service URL to the console-server rbac instance" do
raw = catalogue.resource('file', '/etc/puppetlabs/client-tools/puppet-access.conf')[:content]
json = JSON.parse(raw)
expect(json['service-url']).to eq("https://console.rspec:4433/rbac-api")
expect(json['certificate-file']).to eq("/etc/puppetlabs/puppet/ssl/certs/ca.pem")
end
end
describe "puppetdb-cli" do
it { should contain_file("/etc/puppetlabs/client-tools/puppetdb.conf")
.with_ensure('present')
.with_owner('root')
.with_group('root')
.with_mode('0444') }
it "defaults the service URL to the PuppetDB instance" do
raw = catalogue.resource('file', '/etc/puppetlabs/client-tools/puppetdb.conf')[:content]
json = JSON.parse(raw)['puppetdb']
expect(json['server_urls']).to eq(["https://puppetdb.rspec:8081"])
expect(json['cacert']).to eq("/etc/puppetlabs/puppet/ssl/certs/ca.pem")
end
end
describe "puppetdb-cli with multiple PuppetDBs" do
let(:pre_condition) {
<<-PRE_COND
class {'puppet_enterprise':
certificate_authority_host => 'ca.rspec',
puppet_master_host => 'master.rspec',
console_host => 'console.rspec',
puppetdb_host => ['puppetdb.rspec', 'other.rspec'],
puppetdb_port => ['8080', '9999'],
database_host => 'database.rspec',
mcollective_middleware_hosts => ['mco.rspec'],
pcp_broker_host => 'pcp_broker.rspec',
}
PRE_COND
}
it { should contain_file("/etc/puppetlabs/client-tools/puppetdb.conf")
.with_ensure('present')
.with_owner('root')
.with_group('root')
.with_mode('0444') }
it "defaults the service URL to the PuppetDB instance" do
raw = catalogue.resource('file', '/etc/puppetlabs/client-tools/puppetdb.conf')[:content]
json = JSON.parse(raw)['puppetdb']
expect(json['server_urls']).to eq(["https://puppetdb.rspec:8080", "https://other.rspec:9999"])
expect(json['cacert']).to eq("/etc/puppetlabs/puppet/ssl/certs/ca.pem")
end
end
it { should satisfy_all_relationships }
end
| 37.287879 | 100 | 0.624137 |
088005c388df47d63fec366b26b606a91db22f47 | 2,794 | # -*- encoding: utf-8 -*-
# stub: autoprefixer-rails 9.8.6.1 ruby lib
Gem::Specification.new do |s|
s.name = "autoprefixer-rails".freeze
s.version = "9.8.6.1"
s.required_rubygems_version = Gem::Requirement.new(">= 0".freeze) if s.respond_to? :required_rubygems_version=
s.metadata = { "bug_tracker_uri" => "https://github.com/ai/autoprefixer-rails/issues", "changelog_uri" => "https://github.com/ai/autoprefixer-rails/blob/master/CHANGELOG.md", "source_code_uri" => "https://github.com/ai/autoprefixer-rails" } if s.respond_to? :metadata=
s.require_paths = ["lib".freeze]
s.authors = ["Andrey Sitnik".freeze]
s.date = "2020-08-01"
s.email = "[email protected]".freeze
s.extra_rdoc_files = ["README.md".freeze, "LICENSE".freeze, "CHANGELOG.md".freeze]
s.files = ["CHANGELOG.md".freeze, "LICENSE".freeze, "README.md".freeze]
s.homepage = "https://github.com/ai/autoprefixer-rails".freeze
s.licenses = ["MIT".freeze]
  s.post_install_message = "autoprefixer-rails was deprecated. Migration guide:\nhttps://github.com/ai/autoprefixer-rails/wiki/Deprecated".freeze
s.required_ruby_version = Gem::Requirement.new(">= 2.4".freeze)
s.rubygems_version = "3.0.8".freeze
s.summary = "Parse CSS and add vendor prefixes to CSS rules using values from the Can I Use website.".freeze
s.installed_by_version = "3.0.8" if s.respond_to? :installed_by_version
if s.respond_to? :specification_version then
s.specification_version = 4
if Gem::Version.new(Gem::VERSION) >= Gem::Version.new('1.2.0') then
s.add_runtime_dependency(%q<execjs>.freeze, [">= 0"])
s.add_development_dependency(%q<rails>.freeze, [">= 0"])
s.add_development_dependency(%q<rake>.freeze, [">= 0"])
s.add_development_dependency(%q<rspec-rails>.freeze, [">= 0"])
s.add_development_dependency(%q<rubocop>.freeze, ["~> 0.85.1"])
s.add_development_dependency(%q<rubocop-packaging>.freeze, ["~> 0.1.1"])
s.add_development_dependency(%q<standard>.freeze, [">= 0"])
else
s.add_dependency(%q<execjs>.freeze, [">= 0"])
s.add_dependency(%q<rails>.freeze, [">= 0"])
s.add_dependency(%q<rake>.freeze, [">= 0"])
s.add_dependency(%q<rspec-rails>.freeze, [">= 0"])
s.add_dependency(%q<rubocop>.freeze, ["~> 0.85.1"])
s.add_dependency(%q<rubocop-packaging>.freeze, ["~> 0.1.1"])
s.add_dependency(%q<standard>.freeze, [">= 0"])
end
else
s.add_dependency(%q<execjs>.freeze, [">= 0"])
s.add_dependency(%q<rails>.freeze, [">= 0"])
s.add_dependency(%q<rake>.freeze, [">= 0"])
s.add_dependency(%q<rspec-rails>.freeze, [">= 0"])
s.add_dependency(%q<rubocop>.freeze, ["~> 0.85.1"])
s.add_dependency(%q<rubocop-packaging>.freeze, ["~> 0.1.1"])
s.add_dependency(%q<standard>.freeze, [">= 0"])
end
end
| 50.8 | 270 | 0.665354 |
1a624b001d4be8da2f7ea7337670c98200b26f81 | 5,507 | #
# Author:: Adam Jacob (<[email protected]>)
# Author:: Benjamin Black (<[email protected]>)
# Author:: Bryan McLellan (<[email protected]>)
# Author:: Daniel DeLeo (<[email protected]>)
# Author:: Doug MacEachern (<[email protected]>)
# Author:: James Gartrell (<[email protected]>)
# Author:: Isa Farnik (<[email protected]>)
# Copyright:: Copyright (c) 2008-2018, Chef Software Inc.
# Copyright:: Copyright (c) 2009 Bryan McLellan
# Copyright:: Copyright (c) 2009 Daniel DeLeo
# Copyright:: Copyright (c) 2010 VMware, Inc.
# License:: Apache License, Version 2.0
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
Ohai.plugin(:Hostname) do
require "socket" unless defined?(Socket)
require "ipaddr"
provides "domain", "hostname", "fqdn", "machinename"
# hostname : short hostname
# machinename : output of hostname command (might be short on solaris)
# fqdn : result of canonicalizing hostname using DNS or /etc/hosts
# domain : domain part of FQDN
#
# hostname and machinename should always exist
# fqdn and domain may be broken if DNS is broken on the host
def from_cmd(cmd)
so = shell_out(cmd)
so.stdout.split($/)[0]
end
# forward and reverse lookup to canonicalize FQDN (hostname -f equivalent)
# this is ipv6-safe, works on ruby 1.8.7+
def resolve_fqdn
hostname = from_cmd("hostname")
addrinfo = Socket.getaddrinfo(hostname, nil).first
iaddr = IPAddr.new(addrinfo[3])
Socket.gethostbyaddr(iaddr.hton)[0]
rescue
nil
end
def collect_domain
# Domain is everything after the first dot
if fqdn
fqdn =~ /.+?\.(.*)/
domain $1
end
end
def collect_hostname
# Hostname is everything before the first dot
if machinename
machinename =~ /([^.]+)\.?/
hostname $1
elsif fqdn
fqdn =~ /(.+?)\./
hostname $1
end
end
collect_data(:default) do
machinename from_cmd("hostname")
fqdn resolve_fqdn
collect_hostname
collect_domain
end
collect_data(:aix) do
machinename from_cmd("hostname -s")
fqdn resolve_fqdn || from_cmd("hostname")
collect_hostname
collect_domain
end
collect_data(:netbsd, :openbsd, :dragonflybsd) do
hostname from_cmd("hostname -s")
fqdn resolve_fqdn
machinename from_cmd("hostname")
collect_domain
end
collect_data(:darwin) do
hostname from_cmd("hostname -s")
machinename from_cmd("hostname")
begin
ourfqdn = resolve_fqdn
# Sometimes... very rarely, but sometimes, 'hostname --fqdn' falsely
# returns a blank string. WTF.
if ourfqdn.nil? || ourfqdn.empty?
logger.trace("Plugin Hostname: hostname returned an empty string, retrying once.")
ourfqdn = resolve_fqdn
end
if ourfqdn.nil? || ourfqdn.empty?
logger.trace("Plugin Hostname: hostname returned an empty string twice and will" +
"not be set.")
else
fqdn ourfqdn
end
rescue
logger.trace(
"Plugin Hostname: hostname returned an error, probably no domain set"
)
end
domain collect_domain
end
collect_data(:freebsd) do
hostname from_cmd("hostname -s")
machinename from_cmd("hostname")
fqdn from_cmd("hostname -f")
collect_domain
end
collect_data(:linux) do
hostname from_cmd("hostname -s")
machinename from_cmd("hostname")
begin
ourfqdn = from_cmd("hostname --fqdn")
# Sometimes... very rarely, but sometimes, 'hostname --fqdn' falsely
# returns a blank string. WTF.
if ourfqdn.nil? || ourfqdn.empty?
logger.trace("Plugin Hostname: hostname --fqdn returned an empty string, retrying " +
"once.")
ourfqdn = from_cmd("hostname --fqdn")
end
if ourfqdn.nil? || ourfqdn.empty?
logger.trace("Plugin Hostname: hostname --fqdn returned an empty string twice and " +
"will not be set.")
else
fqdn ourfqdn
end
rescue
logger.trace("Plugin Hostname: hostname --fqdn returned an error, probably no domain set")
end
domain collect_domain
end
collect_data(:solaris2) do
machinename from_cmd("hostname")
hostname from_cmd("hostname")
fqdn resolve_fqdn
domain collect_domain
end
collect_data(:windows) do
require "wmi-lite/wmi"
require "socket" unless defined?(Socket)
wmi = WmiLite::Wmi.new
host = wmi.first_of("Win32_ComputerSystem")
hostname host["dnshostname"].to_s
machinename host["name"].to_s
info = Socket.gethostbyname(Socket.gethostname)
if info.first =~ /.+?\.(.*)/
fqdn info.first
else
# host is not in dns. optionally use:
# C:\WINDOWS\system32\drivers\etc\hosts
info[3..info.length].reverse_each do |addr|
hostent = Socket.gethostbyaddr(addr)
if hostent.first =~ /.+?\.(.*)/
fqdn hostent.first
break
end
end
fqdn info.first unless fqdn
end
domain collect_domain
end
end
| 28.832461 | 96 | 0.660614 |
33cbd74dce0dca44316e19012e1311b64304ce8a | 1,021 | require 'benchmark'
module Dsm
class Time
attr_accessor :total_duration, :recent_duration, :overall_average, :recent_average, :estimate
def initialize
@recent_durations = []
@total_duration = 0.0
end
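    # Times the yielded block, then updates the rolling average of the last
    # ten iterations, the overall average, and the estimated time remaining.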
def measure(iteration, iterations_remaining)
@recent_duration = Benchmark.realtime do
yield
end
@recent_durations.shift() if @recent_durations.size == 10
@recent_durations.push(@recent_duration)
@total_duration += @recent_duration
@overall_average = (@total_duration / iteration)
@recent_average = (@recent_durations.inject{ |sum, el| sum + el }.to_f / @recent_durations.size)
total_seconds_remaining = (iterations_remaining * @recent_average).round(0)
minutes = (total_seconds_remaining / 60.0).floor
@estimate = {
:hours => (minutes / 60.0).floor,
:minutes => minutes % 60,
:seconds => total_seconds_remaining % 60,
:total_seconds => total_seconds_remaining
}
end
end
end
| 30.029412 | 102 | 0.666993 |
1124055e135390c420e7bd3753bed6cde62e01e7 | 2,538 | # This file is auto-generated from the current state of the database. Instead
# of editing this file, please use the migrations feature of Active Record to
# incrementally modify your database, and then regenerate this schema definition.
#
# Note that this schema.rb definition is the authoritative source for your
# database schema. If you need to create the application database on another
# system, you should be using db:schema:load, not running all the migrations
# from scratch. The latter is a flawed and unsustainable approach (the more migrations
# you'll amass, the slower it'll run and the greater likelihood for issues).
#
# It's strongly recommended that you check this file into your version control system.
ActiveRecord::Schema.define(version: 2019_06_11_233925) do
create_table "active_storage_attachments", force: :cascade do |t|
t.string "name", null: false
t.string "record_type", null: false
t.integer "record_id", null: false
t.integer "blob_id", null: false
t.datetime "created_at", null: false
t.index ["blob_id"], name: "index_active_storage_attachments_on_blob_id"
t.index ["record_type", "record_id", "name", "blob_id"], name: "index_active_storage_attachments_uniqueness", unique: true
end
create_table "active_storage_blobs", force: :cascade do |t|
t.string "key", null: false
t.string "filename", null: false
t.string "content_type"
t.text "metadata"
t.bigint "byte_size", null: false
t.string "checksum", null: false
t.datetime "created_at", null: false
t.index ["key"], name: "index_active_storage_blobs_on_key", unique: true
end
create_table "activities", force: :cascade do |t|
t.integer "venue_id"
t.string "name"
t.text "schedule"
t.datetime "created_at", null: false
t.datetime "updated_at", null: false
t.text "description"
t.string "difficultylevel"
t.index ["venue_id"], name: "index_activities_on_venue_id"
end
create_table "categories", force: :cascade do |t|
t.datetime "created_at", null: false
t.datetime "updated_at", null: false
t.string "description"
end
create_table "clasifications", force: :cascade do |t|
t.datetime "created_at", null: false
t.datetime "updated_at", null: false
t.integer "category_id"
t.integer "activity_id"
end
create_table "venues", force: :cascade do |t|
t.string "name"
t.string "description"
t.datetime "created_at", null: false
t.datetime "updated_at", null: false
t.string "phone"
end
end
| 36.782609 | 126 | 0.718676 |
bfe72bffd23f1ac7793d94f179b1eb1be10bb6db | 284 | maintainer "Opscode, Inc."
maintainer_email "[email protected]"
license "Apache 2.0"
description "Updates package list for pacman and has LWRP for pacman groups"
long_description IO.read(File.join(File.dirname(__FILE__), 'README.md'))
version "1.0.2"
| 40.571429 | 81 | 0.704225 |
18e7e0a8202d29bd4c0d791a1127ad2d807344d4 | 5,899 | =begin
#DocuSign REST API
#The DocuSign REST API provides you with a powerful, convenient, and simple Web services API for interacting with DocuSign.
OpenAPI spec version: v2.1
Contact: [email protected]
Generated by: https://github.com/swagger-api/swagger-codegen.git
=end
require 'date'
module DocuSign_eSign
class ConditionalRecipientRuleCondition
#
attr_accessor :filters
#
attr_accessor :order
#
attr_accessor :recipient_label
# Attribute mapping from ruby-style variable name to JSON key.
def self.attribute_map
{
:'filters' => :'filters',
:'order' => :'order',
:'recipient_label' => :'recipientLabel'
}
end
# Attribute type mapping.
def self.swagger_types
{
:'filters' => :'Array<ConditionalRecipientRuleFilter>',
:'order' => :'String',
:'recipient_label' => :'String'
}
end
# Initializes the object
# @param [Hash] attributes Model attributes in the form of hash
def initialize(attributes = {})
return unless attributes.is_a?(Hash)
# convert string to symbol for hash key
attributes = attributes.each_with_object({}) { |(k, v), h| h[k.to_sym] = v }
if attributes.has_key?(:'filters')
if (value = attributes[:'filters']).is_a?(Array)
self.filters = value
end
end
if attributes.has_key?(:'order')
self.order = attributes[:'order']
end
if attributes.has_key?(:'recipientLabel')
self.recipient_label = attributes[:'recipientLabel']
end
end
# Show invalid properties with the reasons. Usually used together with valid?
# @return Array for valid properties with the reasons
def list_invalid_properties
invalid_properties = Array.new
invalid_properties
end
# Check to see if the all the properties in the model are valid
# @return true if the model is valid
def valid?
true
end
# Checks equality by comparing each attribute.
# @param [Object] Object to be compared
def ==(o)
return true if self.equal?(o)
self.class == o.class &&
filters == o.filters &&
order == o.order &&
recipient_label == o.recipient_label
end
# @see the `==` method
# @param [Object] Object to be compared
def eql?(o)
self == o
end
# Calculates hash code according to all attributes.
# @return [Fixnum] Hash code
def hash
[filters, order, recipient_label].hash
end
# Builds the object from hash
# @param [Hash] attributes Model attributes in the form of hash
# @return [Object] Returns the model itself
def build_from_hash(attributes)
return nil unless attributes.is_a?(Hash)
self.class.swagger_types.each_pair do |key, type|
if type =~ /\AArray<(.*)>/i
# check to ensure the input is an array given that the attribute
# is documented as an array but the input is not
if attributes[self.class.attribute_map[key]].is_a?(Array)
self.send("#{key}=", attributes[self.class.attribute_map[key]].map { |v| _deserialize($1, v) })
end
elsif !attributes[self.class.attribute_map[key]].nil?
self.send("#{key}=", _deserialize(type, attributes[self.class.attribute_map[key]]))
end # or else data not found in attributes(hash), not an issue as the data can be optional
end
self
end
# Deserializes the data based on type
# @param string type Data type
# @param string value Value to be deserialized
# @return [Object] Deserialized data
def _deserialize(type, value)
case type.to_sym
when :DateTime
DateTime.parse(value)
when :Date
Date.parse(value)
when :String
value.to_s
when :Integer
value.to_i
when :Float
value.to_f
when :BOOLEAN
if value.to_s =~ /\A(true|t|yes|y|1)\z/i
true
else
false
end
when :Object
# generic object (usually a Hash), return directly
value
when /\AArray<(?<inner_type>.+)>\z/
inner_type = Regexp.last_match[:inner_type]
value.map { |v| _deserialize(inner_type, v) }
when /\AHash<(?<k_type>.+?), (?<v_type>.+)>\z/
k_type = Regexp.last_match[:k_type]
v_type = Regexp.last_match[:v_type]
{}.tap do |hash|
value.each do |k, v|
hash[_deserialize(k_type, k)] = _deserialize(v_type, v)
end
end
else # model
temp_model = DocuSign_eSign.const_get(type).new
temp_model.build_from_hash(value)
end
end
# Returns the string representation of the object
# @return [String] String presentation of the object
def to_s
to_hash.to_s
end
# to_body is an alias to to_hash (backward compatibility)
# @return [Hash] Returns the object in the form of hash
def to_body
to_hash
end
# Returns the object in the form of hash
# @return [Hash] Returns the object in the form of hash
def to_hash
hash = {}
self.class.attribute_map.each_pair do |attr, param|
value = self.send(attr)
next if value.nil?
hash[param] = _to_hash(value)
end
hash
end
# Outputs non-array value in the form of hash
# For object, use to_hash. Otherwise, just return the value
# @param [Object] value Any valid value
# @return [Hash] Returns the value in the form of hash
def _to_hash(value)
if value.is_a?(Array)
value.compact.map { |v| _to_hash(v) }
elsif value.is_a?(Hash)
{}.tap do |hash|
value.each { |k, v| hash[k] = _to_hash(v) }
end
elsif value.respond_to? :to_hash
value.to_hash
else
value
end
end
end
end
| 28.497585 | 123 | 0.615867 |
7a7292a62fa00d6f9a4b39579b3faf03c5d58565 | 2,613 | # frozen_string_literal: true
describe Facter::Resolvers::Xen do
subject(:xen_resolver) { Facter::Resolvers::Xen }
let(:proc_xen_file) { false }
let(:xvda1_file) { false }
let(:log_spy) { instance_spy(Facter::Log) }
let(:domains) { '' }
before do
xen_resolver.instance_variable_set(:@log, log_spy)
allow(File).to receive(:exist?).with('/dev/xen/evtchn').and_return(evtchn_file)
allow(File).to receive(:exist?).with('/proc/xen').and_return(proc_xen_file)
allow(File).to receive(:exist?).with('/dev/xvda1').and_return(xvda1_file)
allow(File).to receive(:exist?).with('/usr/lib/xen-common/bin/xen-toolstack').and_return(false)
allow(File).to receive(:exist?).with('/usr/sbin/xl').and_return(false)
allow(File).to receive(:exist?).with('/usr/sbin/xm').and_return(true)
allow(Facter::Core::Execution).to receive(:execute).with('/usr/sbin/xm list', logger: log_spy).and_return(domains)
xen_resolver.invalidate_cache
end
after do
xen_resolver.invalidate_cache
end
context 'when xen is privileged' do
context 'when /dev/xen/evtchn exists' do
let(:domains) { load_fixture('xen_domains').read }
let(:evtchn_file) { true }
it 'returns xen0' do
expect(xen_resolver.resolve(:vm)).to eq('xen0')
end
it 'detects xen as privileged' do
expect(xen_resolver.resolve(:privileged)).to be_truthy
end
it 'does not check other files' do
expect(File).not_to have_received(:exist?).with('/proc/xen')
end
it 'returns domains' do
expect(xen_resolver.resolve(:domains)).to eq(%w[win linux])
end
end
context 'when /dev/xen/evtchn does not exist' do
let(:evtchn_file) { false }
before do
allow(Facter::Util::FileHelper)
.to receive(:safe_read)
.with('/proc/xen/capabilities', nil)
.and_return('control_d')
end
it 'detects xen as privileged' do
expect(xen_resolver.resolve(:privileged)).to be_truthy
end
end
end
context 'when xen is unprivileged' do
let(:evtchn_file) { false }
let(:xvda1_file) { true }
before do
allow(Facter::Util::FileHelper)
.to receive(:safe_read)
.with('/proc/xen/capabilities', nil)
.and_return(nil)
end
it 'returns xenu' do
expect(xen_resolver.resolve(:vm)).to eq('xenu')
end
it 'detects xen as unprivileged' do
expect(xen_resolver.resolve(:privileged)).to be_falsey
end
it 'does not detect domains' do
expect(xen_resolver.resolve(:domains)).to be_nil
end
end
end
| 29.033333 | 118 | 0.652507 |
bf3c9b5efb698217e08e973c94d9b1fc50cdf3a5 | 869 | ENV['RAILS_ENV'] ||= 'test'
require File.expand_path('../../config/environment', __FILE__)
require 'rails/test_help'
require "minitest/reporters"
Minitest::Reporters.use!
class ActiveSupport::TestCase
# Setup all fixtures in test/fixtures/*.yml for all tests in alphabetical order.
fixtures :all
include ApplicationHelper
# Add more helper methods to be used by all tests here...
  # Returns true if a test user is logged in
def is_logged_in?
!session[:user_id].nil?
end
def log_in_as(user)
session[:user_id] = user.id
end
end
class ActionDispatch::IntegrationTest
# login as test user
def log_in_as(user, password: 'password', remember_me: '1')
post login_path, params: { session: { email: user.email,
password: password,
remember_me: remember_me }}
end
end
| 26.333333 | 82 | 0.659379 |
3837b115402c4ab4ced5a7d7d1ad816c250f2d0f | 1,769 | class Libspnav < Formula
desc "Client library for connecting to 3Dconnexion's 3D input devices"
homepage "https://spacenav.sourceforge.io"
url "https://downloads.sourceforge.net/project/spacenav/spacenav%20library%20%28SDK%29/libspnav%200.2.3/libspnav-0.2.3.tar.gz"
sha256 "7ae4d7bb7f6a5dda28b487891e01accc856311440f582299760dace6ee5f1f93"
livecheck do
url :stable
regex(%r{url=.*?/libspnav[._-]v?(\d+(?:\.\d+)+)\.t}i)
end
bottle do
cellar :any
sha256 "8260c77f747105cff878f66c1c622d9138cb8040c3423b87f5fdfd85ae0a4698" => :big_sur
sha256 "9d7234296b1bdb5c4dd0f1aa5855cca877d2eba7fa83812c34438e7cf401a3cf" => :catalina
sha256 "a428a0b1037ff3dfd5a7ba2463f6ca96717e69be734627d8d7abd079f17fb7d5" => :mojave
sha256 "d61c3082aef6a700ad02d553304add7bb6bb2541236a97cf0a571dcc88f67d16" => :high_sierra
sha256 "55cf0552148451302bb50c04a843d8d3834ca95a38c79bf5270f20ac49f82d41" => :sierra
sha256 "48685db33ebe4acb821b33dbd609f95d03c47bd6c316b08f1bc1110d86271643" => :el_capitan
sha256 "87bf93469bb14eef1a24de81cd521f6a62363a6aa7c04a319f3f18905de039b1" => :yosemite
sha256 "f425659deb611eacb94f2245f0c8f8235aa0169a422874f2aa2c32f8d207b84a" => :mavericks
end
def install
args = %W[
--disable-debug
--disable-dependency-tracking
--disable-silent-rules
--prefix=#{prefix}
--disable-x11
]
system "./configure", *args
system "make", "install"
end
test do
(testpath/"test.cpp").write <<~EOS
#include <spnav.h>
int main() {
bool connected = spnav_open() != -1;
if (connected) spnav_close();
return 0;
}
EOS
system ENV.cc, "test.cpp", "-I#{include}", "-L#{lib}", "-lspnav", "-o", "test"
system "./test"
end
end
| 34.686275 | 128 | 0.716224 |
1cf13983e625397c7dc4434901a667efd1600599 | 1,623 | # typed: false
# frozen_string_literal: true
# This file was generated by GoReleaser. DO NOT EDIT.
class GoreleaserWork < Formula
desc "show version info only."
homepage "https://github.com/reiki4040/goreleaser-work"
version "0.2.4"
on_macos do
if Hardware::CPU.intel?
url "https://github.com/reiki4040/goreleaser-work/releases/download/v0.2.4/goreleaser-work_darwin_x86_64.tar.gz"
sha256 "ced9d8d7a111c224ec1b7d0dd4dea90aae9fc99b1fefe18d58f0ca1c84023a1c"
def install
bin.install "goreleaser-work"
end
end
if Hardware::CPU.arm?
url "https://github.com/reiki4040/goreleaser-work/releases/download/v0.2.4/goreleaser-work_darwin_arm64.tar.gz"
sha256 "3593cea4d55cc6484634a569e2bad3edf6aa721a052ab81b5e9b14629a55eee9"
def install
bin.install "goreleaser-work"
end
end
end
on_linux do
if Hardware::CPU.arm? && Hardware::CPU.is_64_bit?
url "https://github.com/reiki4040/goreleaser-work/releases/download/v0.2.4/goreleaser-work_linux_arm64.tar.gz"
sha256 "66202198010f845b9ca965437342c134364d4d5756c45ead42213e63a2da2098"
def install
bin.install "goreleaser-work"
end
end
if Hardware::CPU.intel?
url "https://github.com/reiki4040/goreleaser-work/releases/download/v0.2.4/goreleaser-work_linux_x86_64.tar.gz"
sha256 "07308a3668a24e0ed34a89a7bb251277028444c76061ad792e40ad2125d664da"
def install
bin.install "goreleaser-work"
end
end
end
def caveats; <<~EOS
    This message is shown on the console when the Homebrew install finishes.
EOS
end
end
| 30.622642 | 118 | 0.728281 |
5db145ddfff38bcb2f8b4f7d6b4f0a830630c366 | 284 | class Product < ActiveRecord::Base
acts_as_superclass
belongs_to :store
belongs_to :organization
validates_presence_of :name, :price
attr_accessible :name, :price
def parent_method
"#{name} - #{price}"
end
def dummy_raise_method(obj)
obj.dummy
end
end
| 14.2 | 37 | 0.714789 |
188119a768b60dd7112e549f7dbab7abfbb94fc8 | 3,627 | # frozen_string_literal: true
require "test_helper"
class Rus3EvaluatorTranslatorfTest < Minitest::Test
def setup
@parser = Rubasteme.parser
@translator = Rus3::Evaluator::Translator.new
end
def test_it_can_translate_boolean
tcs = { "#f" => "false", "#false" => "false",
"#t" => "true", "#true" => "true", }
assert_translate(tcs)
end
def test_it_can_translate_identifier
tcs = { "foo" => "foo", "define" => "define", }
assert_translate(tcs)
end
def test_it_can_translate_character
tcs = { "\#\\a" => "Char.new(\"a\")", }
assert_translate(tcs)
end
def test_it_can_translate_string
tcs = { "hoge" => "hoge", }
assert_translate(tcs)
end
def test_it_can_translate_peculiar_identifier
tcs = { "+" => "test_plus" }
@translator.add_procedure_map({"+" => "test_plus",})
assert_translate(tcs)
end
def test_it_can_translate_list
tcs = { "'(1 2 3)" => '[Integer("1"), Integer("2"), Integer("3")]',
"'(1 (2 3) (4 (5 6) 7))" =>
'[Integer("1"), [Integer("2"), Integer("3")], [Integer("4"), [Integer("5"), Integer("6")], Integer("7")]]', }
assert_translate(tcs)
end
def test_it_can_translate_vector
tcs = { "#(foo bar baz)" => "vector(foo, bar, baz)",
"#(foo #(bar baz))" => "vector(foo, vector(bar, baz))", }
assert_translate(tcs)
end
def test_it_can_translate_lambda_expression
tcs = {
"(lambda (x y) (list x y))" => "lambda{|x, y| list(x, y)}",
}
assert_translate(tcs)
end
def test_it_can_translate_procedure_call
tcs = {
"(list \"foo\" \"bar\")" => "list(\"foo\", \"bar\")",
"((lambda (x y) (add x y)) 1 2)" =>
"lambda{|x, y| add(x, y)}.call(Integer(\"1\"), Integer(\"2\"))"
}
assert_translate(tcs)
end
def test_it_can_translate_conditional
tcs = {
"(if (eqv? n 0) \"zero\")" => "if eqv?(n, Integer(\"0\")); \"zero\"; end",
"(if (eqv? n 0) (write n) (write (add n n)))" =>
"if eqv?(n, Integer(\"0\")); write(n); else; write(add(n, n)); end",
}
assert_translate(tcs)
end
def test_it_can_translate_assignment
tcs = {
"(set! x 1)" => "x = Integer(\"1\")",
"(set! x (concat \"foo\" \"bar\"))" => "x = concat(\"foo\", \"bar\")",
}
assert_translate(tcs)
end
def test_it_can_translate_identifier_definition
tcs = {
"(define foo 3)" => "foo = Integer(\"3\")",
"(define (foo x) (write x))" => "def foo(x); write(x); end",
}
assert_translate(tcs)
end
  def test_it_can_translate_cond
tcs = {
"(cond ((eqv? n 0) (write \"zero\")))" =>
"if eqv?(n, Integer(\"0\")); write(\"zero\"); end",
"(cond ((lt? n 0) (write \"negative\")) ((eqv? n 0) (write \"zero\")))" =>
"if lt?(n, Integer(\"0\")); write(\"negative\"); elsif eqv?(n, Integer(\"0\")); write(\"zero\"); end",
"(cond ((lt? n 0) (write \"negative\")) ((eqv? n 0) (write \"zero\")) (else (write \"positive\")))" =>
"if lt?(n, Integer(\"0\")); write(\"negative\"); elsif eqv?(n, Integer(\"0\")); write(\"zero\"); else; write(\"positive\"); end",
}
assert_translate(tcs)
end
def test_it_can_translate_let
tcs = {
"(let ((x 1) (y 2)) (add x y))" =>
"lambda{|x, y| add(x, y)}.call(Integer(\"1\"), Integer(\"2\"))",
}
assert_translate(tcs)
end
private
def assert_translate(tcs)
tcs.each { |src, expected|
rb_src = @translator.translate(ast_node(src))
assert_equal expected, rb_src
}
end
def ast_node(src)
lexer = Rbscmlex::Lexer.new(src)
@parser.parse(lexer)[0]
end
end
| 28.335938 | 135 | 0.554177 |
6a171b6b36eb9824beb334a9b592a6082e46e9e5 | 3,096 | require 'formula'
class Glib < Formula
homepage 'http://developer.gnome.org/glib/'
url 'http://ftp.gnome.org/pub/gnome/sources/glib/2.36/glib-2.36.4.tar.xz'
sha256 'f654d2542329012d8475736a165dfbf82fadf3ee940c2e0e6ddd4b2fde5cad7e'
option :universal
option 'test', 'Build a debug build and run tests. NOTE: Not all tests succeed yet'
depends_on 'pkg-config' => :build
depends_on 'xz' => :build
depends_on 'gettext'
depends_on 'libffi'
fails_with :llvm do
build 2334
cause "Undefined symbol errors while linking"
end
def patches
p = {}
# https://bugzilla.gnome.org/show_bug.cgi?id=673135 Resolved as wontfix.
p[:p1] = "https://raw.github.com/gist/5393707/5a9047ab7838709084b36242a44471b02d036386/glib-configurable-paths.patch"
p[:p0] = "https://trac.macports.org/export/95596/trunk/dports/devel/glib2/files/patch-configure.diff" if build.universal?
p
end
def install
ENV.universal_binary if build.universal?
# -w is said to causes gcc to emit spurious errors for this package
ENV.enable_warnings if ENV.compiler == :gcc
# Disable dtrace; see https://trac.macports.org/ticket/30413
args = %W[
--disable-maintainer-mode
--disable-dependency-tracking
--disable-dtrace
--disable-modular-tests
--disable-libelf
--prefix=#{prefix}
--localstatedir=#{var}
--with-gio-module-dir=#{HOMEBREW_PREFIX}/lib/gio/modules
]
system "./configure", *args
if build.universal?
system "curl 'https://trac.macports.org/export/95596/trunk/dports/devel/glib2/files/config.h.ed' | ed - config.h"
end
system "make"
# the spawn-multithreaded tests require more open files
system "ulimit -n 1024; make check" if build.include? 'test'
system "make install"
# This sucks; gettext is Keg only to prevent conflicts with the wider
# system, but pkg-config or glib is not smart enough to have determined
# that libintl.dylib isn't in the DYLIB_PATH so we have to add it
# manually.
gettext = Formula.factory('gettext').opt_prefix
inreplace lib+'pkgconfig/glib-2.0.pc' do |s|
s.gsub! 'Libs: -L${libdir} -lglib-2.0 -lintl',
"Libs: -L${libdir} -lglib-2.0 -L#{gettext}/lib -lintl"
s.gsub! 'Cflags: -I${includedir}/glib-2.0 -I${libdir}/glib-2.0/include',
"Cflags: -I${includedir}/glib-2.0 -I${libdir}/glib-2.0/include -I#{gettext}/include"
end
(share+'gtk-doc').rmtree
end
test do
(testpath/'test.c').write <<-EOS.undent
#include <string.h>
#include <glib.h>
int main(void)
{
gchar *result_1, *result_2;
char *str = "string";
result_1 = g_convert(str, strlen(str), "ASCII", "UTF-8", NULL, NULL, NULL);
result_2 = g_convert(result_1, strlen(result_1), "UTF-8", "ASCII", NULL, NULL, NULL);
return (strcmp(str, result_2) == 0) ? 0 : 1;
}
EOS
flags = `pkg-config --cflags --libs glib-2.0`.split + ENV.cflags.split
system ENV.cc, "-o", "test", "test.c", *flags
system "./test"
end
end
| 32.93617 | 125 | 0.651163 |
e91e5e8582c3f35ce1e0066e336784ca503f87b0 | 1,221 | class Tag < ActiveRecord::Base
has_many :taggables, :through => :taggings
has_many :taggings
attr_accessible :name
attr_accessor :highlighted_attributes, :search_result_notes
validates_uniqueness_of :name, :case_sensitive => false
validates_length_of :name, :maximum => 100, :minimum => 1
validate :no_commas_in_name
after_update :reindex_tagged_objects
before_destroy do
reindex_tagged_objects
taggings.destroy_all
end
searchable do
string :type_name
text :name, :stored => true, :boost => SOLR_PRIMARY_FIELD_BOOST
end
def self.named_like(name)
where(["name ILIKE ?", "%#{name}%"])
end
def self.reset_all_counters
find_each { |tag| Tag.reset_counters(tag.id, :taggings) }
end
def self.find_or_create_by_tag_name(name)
self.where("UPPER(name) = UPPER(?)", name).first_or_create!(:name => name)
end
private
def no_commas_in_name
errors.add(:name, :invalid) if name =~ /,/
end
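  # Queues a Solr reindex for every object currently tagged with this tag.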
def reindex_tagged_objects
taggings = Tagging.where(tag_id: id)
objects_to_reindex = taggings.map(&:taggable).map do |obj|
[obj.class.to_s, obj.id]
end
QC.enqueue_if_not_queued("SolrIndexer.reindex_objects", objects_to_reindex)
end
end
| 24.918367 | 79 | 0.718264 |
f88ce18c806e676c94e8dfcabc4e3a2f99ece26e | 1,940 | RSpec.describe PreventGo::Base do
describe '.initialize' do
subject { described_class.new(file) }
let(:file) { test_file_path_for('test.pdf') }
context 'when PreventGo respond with a code 400', cassette: '400' do
it 'should raise a PreventGo::ResponseError' do
expect { subject }.to raise_error(PreventGo::ResponseError)
end
it 'should set code and message in exception' do
begin
subject
rescue PreventGo::ResponseError => e
expect(e.code).to eq('400')
expect(e.type).to eq(1205)
end
end
end
context 'when PreventGo respond with a code 403', cassette: '403' do
it 'should raise a PreventGo::ResponseError' do
expect { subject }.to raise_error(PreventGo::ResponseError)
end
it 'should set code and message in exception' do
begin
subject
rescue PreventGo::ResponseError => e
expect(e.code).to eq('403')
expect(e.type).to eq(1205)
end
end
end
context 'when PreventGo respond with a code 500', cassette: '500' do
it 'should raise a PreventGo::ResponseError' do
expect { subject }.to raise_error(PreventGo::ResponseError)
end
it 'should set code and message in exception' do
begin
subject
rescue PreventGo::ResponseError => e
expect(e.code).to eq('500')
expect(e.type).to eq(1205)
end
end
end
context 'when PreventGo respond with a code 503', cassette: '503' do
it 'should raise a PreventGo::ResponseError' do
expect { subject }.to raise_error(PreventGo::ResponseError)
end
it 'should set code and message in exception' do
begin
subject
rescue PreventGo::ResponseError => e
expect(e.code).to eq('503')
expect(e.type).to eq(1205)
end
end
end
end
end
| 28.955224 | 72 | 0.607732 |
e8271ab989b9be2b3935615145c687dea2bcd8e4 | 1,558 | # Model class for blog posts
class BlogPost < ApplicationRecord
belongs_to :blog
belongs_to :author, class_name: 'User',
foreign_key: :user_id,
inverse_of: :blog_posts
has_one :discussion, as: :resource, dependent: :destroy
delegate :hidden, to: :discussion, allow_nil: true, prefix: true
delegate :locked, to: :discussion, allow_nil: true, prefix: true
# Allowed characters for slugs: a-z A-Z 0-9 . _ -
SLUG_REGEX = %r{[-_\.a-zA-Z0-9]+}.freeze
private_constant :SLUG_REGEX
ANCHORED_SLUG_REGEX = %r{\A#{SLUG_REGEX}\z}.freeze
private_constant :ANCHORED_SLUG_REGEX
validates :blog_id, presence: true
validates :user_id, presence: true
validates :title, presence: true
validates :slug, presence: true
validates :slug, format: ANCHORED_SLUG_REGEX
validates :body, presence: true
before_validation :generate_slug, if: -> { slug.blank? && title.present? }
# Configure default count-per-page for pagination
paginates_per 20
# Add tagging features
acts_as_taggable
# Instance methods
def generate_slug
self.slug = title.parameterize
end
def posted_month
posted_at.strftime( '%m' )
end
def posted_year
posted_at.strftime( '%Y' )
end
def teaser( paragraphs: 3 )
paras = body.split %r{</p>[^<]*<p>}i
return paras[ 0..( paragraphs - 1 ) ].join( "</p>\n<p>" ) if paras.size > 1
paras = body.split %r{<br ?/?><br ?/?>}i
return paras[ 0..( paragraphs - 1 ) ].join "\n<br><br>\n" if paras.size > 1
body
end
end
| 26.862069 | 79 | 0.65982 |
21396ddeec91566b9834c8f683a5a9247b0c4ce3 | 479 | # frozen_string_literal: true
module Gadget
module Generators
#
# generate type
#
class TypeGenerator < Rails::Generators::NamedBase
source_root File.expand_path('templates', __dir__)
      def check_model_existence
raise "Cannot find model #{name}" unless Module.const_defined?(name)
end
def create_type_file
template('types/type.rb.tt', "app/graphql/types/#{file_name}_type.rb", { name: name })
end
end
end
end
| 23.95 | 94 | 0.668058 |
399ea6639f76f6ef717d0c57797a8655493946f1 | 587 | $LOAD_PATH.unshift File.expand_path('../../lib', __FILE__)
require "json"
require "active_support/core_ext/hash/keys"
require "active_support/inflector"
require "rack/test"
require 'camel_patrol'
require "rails/version"
require 'minitest/autorun'
require "minitest/reporters"
Minitest::Reporters.use!(
[
Minitest::Reporters::DefaultReporter.new,
# This path should match up, save for the last directory, with the value
# in `store_test_results` and `store_artifacts` in .circleci/config.yml
Minitest::Reporters::JUnitReporter.new("tmp/test_results/minitest"),
],
)
| 29.35 | 76 | 0.754685 |
ab04e803e4f06d740fea5c7b87fc5e64c85dd6e2 | 4,506 | require 'set'
module Aws
module Api
module Docs
module Utils
include Seahorse::Model
include Seahorse::Model::Shapes
def tag(string)
YARD::DocstringParser.new.parse(string).to_docstring.tags.first
end
def summary(string)
if string
YARD::DocstringParser.new.parse(string).to_docstring.summary
else
nil
end
end
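        # Builds a ShapeRef for an operation's input structure, adding a
        # :response_target member when the operation streams its response and
        # skipping any member names listed in options[:without].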
def operation_input_ref(operation, options = {})
struct = StructureShape.new
# add the response target input member if the operation is streaming
if
operation.output &&
operation.output[:payload] &&
operation.output[:payload_member][:streaming]
then
target = ShapeRef.new(shape: BlobShape.new)
target[:response_target] = true
target.documentation = "Specifies where to stream response data. You can provide the path where a file will be created on disk, or you can provide an IO object. If omitted, the response data will be loaded into memory and written to a StringIO object."
struct.add_member(:response_target, target)
end
# copy existing input members
skip = options[:without] || Set.new
if operation.input
operation.input.shape.members.each do |member_name, member_ref|
unless skip.include?(member_name.to_s)
struct.add_member(member_name, member_ref)
end
end
end
ShapeRef.new(shape: struct)
end
# Given a shape reference, this function returns a Set of all
# of the recursive shapes found in tree.
def compute_recursive_shapes(ref, stack = [], recursive = Set.new)
if ref && !stack.include?(ref.shape)
stack.push(ref.shape)
case ref.shape
when StructureShape
ref.shape.members.each do |_, member_ref|
compute_recursive_shapes(member_ref, stack, recursive)
end
when ListShape
compute_recursive_shapes(ref.shape.member, stack, recursive)
when MapShape
compute_recursive_shapes(ref.shape.value, stack, recursive)
end
stack.pop
elsif ref
recursive << ref.shape
end
recursive
end
# Given a shape ref, returns the type accepted when given as input.
def input_type(ref, link = false)
if BlobShape === ref.shape
'IO,String'
else
output_type(ref, link)
end
end
# Given a shape ref, returns the type returned in output.
def output_type(ref, link = false)
case ref.shape
when StructureShape
type = "Types::" + ref.shape.name
link ? "{#{type}}" : type
when ListShape
"Array<#{output_type(ref.shape.member, link)}>"
when MapShape
"Hash<String,#{output_type(ref.shape.value, link)}>"
when BlobShape
ref[:streaming] ? 'IO' : 'String'
when BooleanShape then 'Boolean'
when FloatShape then 'Float'
when IntegerShape then 'Integer'
when StringShape then 'String'
when TimestampShape then 'Time'
else raise "unsupported shape #{ref.shape.class.name}"
end
end
# Documents a structure member as a attribute method
def document_struct_member(yard_class, member_name, ref, read_write = true)
m = YARD::CodeObjects::MethodObject.new(yard_class, member_name)
m.scope = :instance
m.docstring = struct_member_docstring(ref.documentation, ref)
returns = "@return [#{output_type(ref)}] #{summary(ref.documentation)}"
m.add_tag(tag(returns))
yard_class.instance_attributes[member_name] = read_write ?
{ :read => m, :write => m } :
{ :read => m }
end
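        # Appends the list of allowed enum values to a string member's
        # documentation when the shape defines an enum.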
def struct_member_docstring(docs, ref)
if
Seahorse::Model::Shapes::StringShape === ref.shape &&
ref.shape.enum
then
docs = "#{docs} <p>Possible values:</p><ul>"
docs += ref.shape.enum.map { |v| "<li><tt>#{v}</tt></li>" }.join
docs += "</ul>"
docs
else
docs
end
end
end
end
end
end
| 33.626866 | 264 | 0.56569 |
08ba2fc5866a32b5e146848ac1f1c3364e6a3ede | 2,067 | class Libusbmuxd < Formula
desc "USB multiplexor library for iOS devices"
homepage "https://www.libimobiledevice.org/"
url "https://github.com/libimobiledevice/libusbmuxd/archive/2.0.2.tar.gz"
sha256 "8ae3e1d9340177f8f3a785be276435869363de79f491d05d8a84a59efc8a8fdc"
license all_of: ["GPL-2.0-or-later", "LGPL-2.1-or-later"]
head "https://github.com/libimobiledevice/libusbmuxd.git"
bottle do
sha256 cellar: :any, arm64_monterey: "27d1d27bbc4b9b2079bebe6460610c4c57acc9f2cffe51374ede91b717f7b49b"
sha256 cellar: :any, arm64_big_sur: "9cd9d1df802799e026f09775bbde2c4bf0557fb3e1f5919f14a5b0def0b0255e"
sha256 cellar: :any, monterey: "b5e52f9c8804a553ee3832d13c3e2a56a3253f17573addc3d8231c1503b35d07"
sha256 cellar: :any, big_sur: "faf8346e0e4caa8ac7c4ac7e3b838693f847a88120cf477b8e8c82bd0a7628f6"
sha256 cellar: :any, catalina: "72fcc67099f03a3d68faa131eaf464a431e5d5eaea0a5ddb9b8414c065f7ef73"
sha256 cellar: :any, mojave: "132ee76aa823e51abb97c92c53ab8a30819720ced7020080f949cf4fd937f6ea"
sha256 cellar: :any, high_sierra: "67c3d43cb2a1ebfd68fba1c9b51b419288fedefc93f101adeea1b5f6bdf1ad77"
sha256 cellar: :any_skip_relocation, x86_64_linux: "b1f289531042024ef7fb1f87cad05f36a1c68ece14614266cf0564e32b3565ac"
end
depends_on "autoconf" => :build
depends_on "automake" => :build
depends_on "libtool" => :build
depends_on "pkg-config" => :build
depends_on "libplist"
depends_on "libusb"
uses_from_macos "netcat" => :test
def install
system "./autogen.sh"
system "./configure", "--disable-dependency-tracking",
"--disable-silent-rules",
"--prefix=#{prefix}"
system "make", "install"
end
test do
source = free_port
dest = free_port
fork do
exec bin/"iproxy", "-s", "localhost", "#{source}:#{dest}"
end
sleep(2)
system "nc", "-z", "localhost", source
end
end
| 43.0625 | 123 | 0.688921 |