hexsha (stringlengths 40-40) | size (int64 2-1.01M) | content (stringlengths 2-1.01M) | avg_line_length (float64 1.5-100) | max_line_length (int64 2-1k) | alphanum_fraction (float64 0.25-1) |
---|---|---|---|---|---|
61b9fa6bf49b232e366aa8aa44172b44c0d144ed | 2,172 | require 'xmlscan/scanner'
require 'stringio'
module RSS
class XMLScanParser < BaseParser
class << self
def listener
XMLScanListener
end
end
private
def _parse
begin
if @rss.is_a?(String)
input = StringIO.new(@rss)
else
input = @rss
end
scanner = XMLScan::XMLScanner.new(@listener)
scanner.parse(input)
rescue XMLScan::Error => e
lineno = e.lineno || scanner.lineno || input.lineno
raise NotWellFormedError.new(lineno){e.message}
end
end
end
class XMLScanListener < BaseListener
include XMLScan::Visitor
include ListenerMixin
ENTITIES = {
'lt' => '<',
'gt' => '>',
'amp' => '&',
'quot' => '"',
'apos' => '\''
}
def on_xmldecl_version(str)
@version = str
end
def on_xmldecl_encoding(str)
@encoding = str
end
def on_xmldecl_standalone(str)
@standalone = str
end
def on_xmldecl_end
xmldecl(@version, @encoding, @standalone == "yes")
end
alias_method(:on_pi, :instruction)
alias_method(:on_chardata, :text)
alias_method(:on_cdata, :text)
def on_etag(name)
tag_end(name)
end
def on_entityref(ref)
text(entity(ref))
end
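# A numeric character reference is decoded to its UTF-8 character via Array#pack('U').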
def on_charref(code)
text([code].pack('U'))
end
alias_method(:on_charref_hex, :on_charref)
def on_stag(name)
@attrs = {}
end
def on_attribute(name)
@attrs[name] = @current_attr = ''
end
def on_attr_value(str)
@current_attr << str
end
def on_attr_entityref(ref)
@current_attr << entity(ref)
end
def on_attr_charref(code)
@current_attr << [code].pack('U')
end
alias_method(:on_attr_charref_hex, :on_attr_charref)
def on_stag_end(name)
tag_start(name, @attrs)
end
def on_stag_end_empty(name)
tag_start(name, @attrs)
tag_end(name)
end
private
def entity(ref)
ent = ENTITIES[ref]
if ent
ent
else
wellformed_error("undefined entity: #{ref}")
end
end
end
end
| 17.803279 | 59 | 0.574586 |
bb24bca041b61987670ff8369c74a4e8878198c2 | 1,185 | # encoding: UTF-8
# This file is auto-generated from the current state of the database. Instead
# of editing this file, please use the migrations feature of Active Record to
# incrementally modify your database, and then regenerate this schema definition.
#
# Note that this schema.rb definition is the authoritative source for your
# database schema. If you need to create the application database on another
# system, you should be using db:schema:load, not running all the migrations
# from scratch. The latter is a flawed and unsustainable approach (the more migrations
# you'll amass, the slower it'll run and the greater likelihood for issues).
#
# It's strongly recommended that you check this file into your version control system.
ActiveRecord::Schema.define(version: 20150515162210) do
# These are extensions that must be enabled in order to support this database
enable_extension "plpgsql"
create_table "bands", force: :cascade do |t|
t.string "description"
end
create_table "bands_venues", force: :cascade do |t|
t.integer "band_id"
t.integer "venue_id"
end
create_table "venues", force: :cascade do |t|
t.string "description"
end
end
| 35.909091 | 86 | 0.760338 |
ac4473e58d4e95dc6f3ba52f238bf541cd9a141e | 1,444 | require "spec_helper"
describe Toolsmith::ViewHelpers::BootstrapHelpers do
subject { AbstractActionView.new }
let(:content_block) do
Proc.new { "content" }
end
context "#form_actions" do
let(:element) { to_element subject.form_actions(&content_block), "div" }
it "renders a container with a class" do
expect(element[:class]).to include "form-actions"
end
it "renders a container with content" do
expect(element.text).to eq("content")
end
end
context "#flash_div" do
after do
subject.flash.clear
end
it "renders a close link in the div" do
subject.flash.now[:success] = "Success!"
element = to_element(subject.flash_div(:success), "div").at_css("a.close")
expect(element).to be_present
end
described_class::FLASH_LEVELS.each do |level|
it "renders a flash div for #{level}" do
subject.flash.now[level] = "#{level}!"
element = to_element(subject.flash_div(level), "div")
element.at_css("a").remove
expect(element.text).to eq("#{level}!")
end
end
end
context "#flash_divs" do
after do
subject.flash.clear
end
it "renders all flash divs" do
described_class::FLASH_LEVELS.each do |level|
subject.flash.now[level] = "Flash #{level}!"
end
divs = Nokogiri::HTML(subject.flash_divs).css("div.alert")
expect(divs.size).to eq(4)
end
end
end
| 25.333333 | 80 | 0.641274 |
ab1497cc65930a1c82f73257fcf59c90cb48df00 | 1,274 | # encoding: utf-8
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
module Azure::Logic::Mgmt::V2016_06_01
module Models
#
# Model object.
#
#
class AccessKeyRegenerateActionDefinition
include MsRestAzure
# @return [AccessKeyType] Possible values include: 'NotSpecified',
# 'Primary', 'Secondary'
attr_accessor :key_type
#
# Mapper for AccessKeyRegenerateActionDefinition class as Ruby Hash.
# This will be used for serialization/deserialization.
#
def self.mapper()
{
client_side_validation: true,
required: false,
serialized_name: 'AccessKeyRegenerateActionDefinition',
type: {
name: 'Composite',
class_name: 'AccessKeyRegenerateActionDefinition',
model_properties: {
key_type: {
client_side_validation: true,
required: true,
serialized_name: 'keyType',
type: {
name: 'Enum',
module: 'AccessKeyType'
}
}
}
}
}
end
end
end
end
| 25.48 | 74 | 0.565934 |
abb7bc3febd67132b925fef3e8fc8d80aa80f023 | 11,084 | # frozen_string_literal: true
describe 'users update end-point', :transaction do
before(:each) do
creation = create_user build(:user, :with_credentials)
expect(creation).to be_success
@current_user = creation.value!
encoding = tokenize @current_user
expect(encoding).to be_success
@token = encoding.value!
end
before(:each) do
result = create_user build(:user, firstname: 'jane')
expect(result).to be_success
@jane = result.value!
end
context 'when given malformed id' do
it 'should return 404 Not Found' do
header 'authorization', "Bearer #{@token}"
patch_json '/api/v1/users/foo', firstname: 'john'
expect(last_response.status).to eq(404)
expect(last_response.body).to be_empty
end
end
context 'when given an invalid id' do
it 'should return 404 Not Found' do
header 'authorization', "Bearer #{@token}"
patch_json "/api/v1/users/#{SecureRandom.uuid}", firstname: 'john'
expect(last_response.status).to eq(404)
expect(last_response.body).to be_empty
end
end
context 'without HTTP_IF_MATCH' do
it 'should return 428 Precondition Required' do
header 'authorization', "Bearer #{@token}"
patch_json "/api/v1/users/#{@jane.id}", firstname: 'john'
expect(last_response.status).to eq(428)
expect(last_response.body).to be_empty
end
end
context 'with a stale resource' do
it 'should return 412 Precondition Failed' do
header 'authorization', "Bearer #{@token}"
header 'if-match', etag(SecureRandom.uuid)
patch_json "/api/v1/users/#{@jane.id}", firstname: 'john'
expect(last_response.status).to eq(412)
expect(last_response.body).to be_empty
end
end
describe 'firstname' do
it 'should not be empty' do
header 'authorization', "Bearer #{@token}"
header 'if-match', etag(@jane.version)
patch_json "/api/v1/users/#{@jane.id}", firstname: ''
expect(last_response.status).to eq(400)
expect(last_response.content_type).to eq('application/json')
expect(json_body[:firstname]).to include('length must be within 1 - 255')
end
it 'should not be too long' do
header 'authorization', "Bearer #{@token}"
header 'if-match', etag(@jane.version)
patch_json "/api/v1/users/#{@jane.id}", firstname: 'x' * 256
expect(last_response.status).to eq(400)
expect(last_response.content_type).to eq('application/json')
expect(json_body[:firstname]).to include('length must be within 1 - 255')
end
it 'should be stripped and capitalized' do
header 'authorization', "Bearer #{@token}"
header 'if-match', etag(@jane.version)
patch_json "/api/v1/users/#{@jane.id}", firstname: " john\n"
expect(last_response.status).to eq(200)
expect(last_response.content_type).to eq('application/json')
expect(json_body[:firstname]).to eq('John')
end
end
describe 'lastname' do
it 'should not be empty' do
header 'authorization', "Bearer #{@token}"
header 'if-match', etag(@jane.version)
patch_json "/api/v1/users/#{@jane.id}", lastname: ''
expect(last_response.status).to eq(400)
expect(last_response.content_type).to eq('application/json')
expect(json_body[:lastname]).to include('length must be within 1 - 255')
end
it 'should not be too long' do
header 'authorization', "Bearer #{@token}"
header 'if-match', etag(@jane.version)
patch_json "/api/v1/users/#{@jane.id}", lastname: 'x' * 256
expect(last_response.status).to eq(400)
expect(last_response.content_type).to eq('application/json')
expect(json_body[:lastname]).to include('length must be within 1 - 255')
end
it 'should be stripped and capitalized' do
header 'authorization', "Bearer #{@token}"
header 'if-match', etag(@jane.version)
patch_json "/api/v1/users/#{@jane.id}", lastname: "\tDoE\n "
expect(last_response.status).to eq(200)
expect(last_response.content_type).to eq('application/json')
expect(json_body[:lastname]).to eq('Doe')
end
end
describe 'birthdate' do
it 'should be a date' do
header 'authorization', "Bearer #{@token}"
header 'if-match', etag(@jane.version)
patch_json "/api/v1/users/#{@jane.id}", birthdate: 'not a date'
expect(last_response.status).to eq(400)
expect(last_response.content_type).to eq('application/json')
expect(json_body[:birthdate]).to include('must be a date')
end
it 'should be in the past' do
header 'authorization', "Bearer #{@token}"
header 'if-match', etag(@jane.version)
patch_json "/api/v1/users/#{@jane.id}", birthdate: Date.today
expect(last_response.status).to eq(400)
expect(last_response.content_type).to eq('application/json')
expect(json_body[:birthdate]).to include("must be less than #{Date.today}")
end
it 'should be stripped and ISO8601' do
header 'authorization', "Bearer #{@token}"
header 'if-match', etag(@jane.version)
patch_json "/api/v1/users/#{@jane.id}", birthdate: " 5 Nov 1605\n"
expect(last_response.status).to eq(200)
expect(last_response.content_type).to eq('application/json')
expect(json_body[:birthdate]).to eq('1605-11-05')
end
end
describe 'gender' do
it 'should be either male of female' do
header 'authorization', "Bearer #{@token}"
header 'if-match', etag(@jane.version)
patch_json "/api/v1/users/#{@jane.id}", gender: '?'
expect(last_response.status).to eq(400)
expect(last_response.content_type).to eq('application/json')
expect(json_body[:gender]).to include('must be one of: female, male')
end
it 'should be stripped and downcased' do
header 'authorization', "Bearer #{@token}"
header 'if-match', etag(@jane.version)
patch_json "/api/v1/users/#{@jane.id}", gender: ' MALE '
expect(last_response.status).to eq(200)
expect(last_response.content_type).to eq('application/json')
expect(json_body[:gender]).to eq('male')
end
end
describe 'login' do
it 'should be required when password is provided' do
header 'authorization', "Bearer #{@token}"
header 'if-match', etag(@jane.version)
patch_json "/api/v1/users/#{@jane.id}", password: 'secret'
expect(last_response.status).to eq(400)
expect(last_response.content_type).to eq('application/json')
expect(json_body[:login]).to include('must be filled')
end
it 'should not be too short' do
header 'authorization', "Bearer #{@token}"
header 'if-match', etag(@jane.version)
patch_json "/api/v1/users/#{@jane.id}", login: '12'
expect(last_response.status).to eq(400)
expect(last_response.content_type).to eq('application/json')
expect(json_body[:login]).to include('length must be within 3 - 255')
end
it 'should not be too long' do
header 'authorization', "Bearer #{@token}"
header 'if-match', etag(@jane.version)
patch_json "/api/v1/users/#{@jane.id}", login: 'x' * 256
expect(last_response.status).to eq(400)
expect(last_response.content_type).to eq('application/json')
expect(json_body[:login]).to include('length must be within 3 - 255')
end
it 'should be stripped and capitalized' do
header 'authorization', "Bearer #{@token}"
header 'if-match', etag(@jane.version)
patch_json "/api/v1/users/#{@jane.id}", build(:user, :with_credentials, login: " JoHn\t")
expect(last_response.status).to eq(200)
expect(last_response.content_type).to eq('application/json')
expect(json_body[:login]).to eq('john')
end
end
describe 'password' do
it 'should be required when login is provided' do
header 'authorization', "Bearer #{@token}"
header 'if-match', etag(@jane.version)
patch_json "/api/v1/users/#{@jane.id}", login: 'john'
expect(last_response.status).to eq(400)
expect(last_response.content_type).to eq('application/json')
expect(json_body[:password]).to include('must be filled')
end
it 'should not be too short' do
header 'authorization', "Bearer #{@token}"
header 'if-match', etag(@jane.version)
patch_json "/api/v1/users/#{@jane.id}", password: '12345'
expect(last_response.status).to eq(400)
expect(last_response.content_type).to eq('application/json')
expect(json_body[:password]).to include('size cannot be less than 6')
end
it 'should not be returned' do
header 'authorization', "Bearer #{@token}"
header 'if-match', etag(@jane.version)
patch_json "/api/v1/users/#{@jane.id}", build(:user, :with_credentials)
expect(last_response.status).to eq(200)
expect(last_response.content_type).to eq('application/json')
expect(json_body).not_to include(:password)
end
end
describe 'version' do
it 'should have changed' do
header 'authorization', "Bearer #{@token}"
header 'if-match', etag(@jane.version)
patch_json "/api/v1/users/#{@jane.id}", build(:user, :with_credentials)
expect(last_response.status).to eq(200)
expect(last_response.content_type).to eq('application/json')
expect(json_body[:version]).not_to eq(@jane.version)
end
end
context 'when the user has credentials' do
before(:each) do
result = create_user build(:user, :with_credentials, firstname: 'leia')
expect(result).to be_success
@leia = result.value!
end
describe 'login' do
it 'should not be required to change the password' do
header 'authorization', "Bearer #{@token}"
header 'if-match', etag(@leia.version)
patch_json "/api/v1/users/#{@leia.id}", password: 'secret'
expect(last_response.status).to eq(200)
expect(last_response.content_type).to eq('application/json')
end
it 'should be unique' do
header 'authorization', "Bearer #{@token}"
header 'if-match', etag(@leia.version)
patch_json "/api/v1/users/#{@leia.id}", login: @current_user.login, password: 'secret'
expect(last_response.status).to eq(409)
expect(last_response.content_type).to eq('application/json')
expect(json_body[:login]).to eq('is already taken')
end
end
describe 'password' do
it 'should not be required to change the login' do
header 'authorization', "Bearer #{@token}"
header 'if-match', etag(@leia.version)
patch_json "/api/v1/users/#{@leia.id}", login: 'john'
expect(last_response.status).to eq(200)
expect(last_response.content_type).to eq('application/json')
end
end
end
it 'should update the matching user' do
header 'authorization', "Bearer #{@token}"
header 'if-match', etag(@jane.version)
patch_json "/api/v1/users/#{@jane.id}", firstname: 'john'
expect(last_response.status).to eq(200)
expect(found = find_user(@jane.id)).to be_success
expect(json_body).to eq(hiphop(found.value!))
end
end
| 40.452555 | 95 | 0.657705 |
87b5d7ceb18e7b861d43821364a5c6f3601bb66a | 4,744 | class Rust < Formula
desc "Safe, concurrent, practical language"
homepage "https://www.rust-lang.org/"
stable do
url "https://static.rust-lang.org/dist/rustc-1.30.0-src.tar.gz"
sha256 "cd0ba83fcca55b64c0c9f23130fe731dfc1882b73ae21bef96be8f2362c108ee"
resource "cargo" do
url "https://github.com/rust-lang/cargo.git",
:tag => "0.31.0",
:revision => "36d96825d0f288c6d1bb2219919a277968bd365f"
end
resource "racer" do
# Racer should stay < 2.1 for now as 2.1 needs the nightly build of rust
# See https://github.com/racer-rust/racer/tree/v2.1.2#installation
url "https://github.com/racer-rust/racer/archive/2.0.14.tar.gz"
sha256 "0442721c01ae4465843cb73b24f6caa0127c3308d72b944ad75736164756e522"
end
end
bottle do
rebuild 1
sha256 "c4043e11a5d0219a355e3444dd956a247ba330729d19bfe438ac5eaa39dd4d3b" => :mojave
sha256 "56f26ad6b7c78f0fcb2990ea66c14d28f7bc5bc8bae015a35cd76687efaaded2" => :high_sierra
sha256 "b6de1fa8e7727c619898140eb9277f72d54e453ea88b84f140c9d5bc376a87c8" => :sierra
end
head do
url "https://github.com/rust-lang/rust.git"
resource "cargo" do
url "https://github.com/rust-lang/cargo.git"
end
resource "racer" do
url "https://github.com/racer-rust/racer.git"
end
end
option "with-llvm", "Build with brewed LLVM. By default, Rust's LLVM will be used."
depends_on "cmake" => :build
depends_on "libssh2"
depends_on "openssl"
depends_on "pkg-config"
depends_on "llvm" => :optional
# According to the official readme, GCC 4.7+ is required
fails_with :gcc_4_0
fails_with :gcc_4_2
("4.3".."4.6").each do |n|
fails_with :gcc => n
end
resource "cargobootstrap" do
# From https://github.com/rust-lang/rust/blob/#{version}/src/stage0.txt
url "https://static.rust-lang.org/dist/2018-10-12/cargo-0.30.0-x86_64-apple-darwin.tar.gz"
sha256 "defc1ba047f09219a50ff39032b5d7aaf26563f6bed528b93055622eedfddabf"
end
def install
# Fix build failure for compiler_builtins "error: invalid deployment target
# for -stdlib=libc++ (requires OS X 10.7 or later)"
ENV["MACOSX_DEPLOYMENT_TARGET"] = MacOS.version
# Ensure that the `openssl` crate picks up the intended library.
# https://crates.io/crates/openssl#manual-configuration
ENV["OPENSSL_DIR"] = Formula["openssl"].opt_prefix
# Fix build failure for cmake v0.1.24 "error: internal compiler error:
# src/librustc/ty/subst.rs:127: impossible case reached" on 10.11, and for
# libgit2-sys-0.6.12 "fatal error: 'os/availability.h' file not found
# #include <os/availability.h>" on 10.11 and "SecTrust.h:170:67: error:
# expected ';' after top level declarator" among other errors on 10.12
ENV["SDKROOT"] = MacOS.sdk_path
args = ["--prefix=#{prefix}"]
args << "--disable-rpath" if build.head?
args << "--llvm-root=#{Formula["llvm"].opt_prefix}" if build.with? "llvm"
if build.head?
args << "--release-channel=nightly"
else
args << "--release-channel=stable"
end
system "./configure", *args
system "make"
system "make", "install"
resource("cargobootstrap").stage do
system "./install.sh", "--prefix=#{buildpath}/cargobootstrap"
end
ENV.prepend_path "PATH", buildpath/"cargobootstrap/bin"
resource("cargo").stage do
ENV["RUSTC"] = bin/"rustc"
system "cargo", "install", "--root", prefix, "--path", "."
end
resource("racer").stage do
ENV.prepend_path "PATH", bin
cargo_home = buildpath/"cargo_home"
cargo_home.mkpath
ENV["CARGO_HOME"] = cargo_home
system "cargo", "install", "--root", libexec, "--path", "."
(bin/"racer").write_env_script(libexec/"bin/racer", :RUST_SRC_PATH => pkgshare/"rust_src")
end
# Remove any binary files; as Homebrew will run ranlib on them and barf.
rm_rf Dir["src/{llvm,llvm-emscripten,test,librustdoc,etc/snapshot.pyc}"]
(pkgshare/"rust_src").install Dir["src/*"]
rm_rf prefix/"lib/rustlib/uninstall.sh"
rm_rf prefix/"lib/rustlib/install.log"
end
def post_install
Dir["#{lib}/rustlib/**/*.dylib"].each do |dylib|
chmod 0664, dylib
MachO::Tools.change_dylib_id(dylib, "@rpath/#{File.basename(dylib)}")
chmod 0444, dylib
end
end
test do
system "#{bin}/rustdoc", "-h"
(testpath/"hello.rs").write <<~EOS
fn main() {
println!("Hello World!");
}
EOS
system "#{bin}/rustc", "hello.rs"
assert_equal "Hello World!\n", `./hello`
system "#{bin}/cargo", "new", "hello_world", "--bin"
assert_equal "Hello, world!",
(testpath/"hello_world").cd { `#{bin}/cargo run`.split("\n").last }
end
end
| 33.885714 | 96 | 0.666315 |
ac901d12503fdf84bdba6a2507e4b76754eca9bd | 1,338 | #-- copyright
# OpenProject is a project management system.
# Copyright (C) 2012-2018 the OpenProject Foundation (OPF)
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License version 3.
#
# OpenProject is a fork of ChiliProject, which is a fork of Redmine. The copyright follows:
# Copyright (C) 2006-2017 Jean-Philippe Lang
# Copyright (C) 2010-2013 the ChiliProject Team
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# See docs/COPYRIGHT.rdoc for more details.
#++
class AddUpdatedAtIndexToWorkPackages < ActiveRecord::Migration[5.1]
def change
add_index :work_packages, :updated_at
end
end
| 39.352941 | 91 | 0.765321 |
6a0e578d214961ff07efde2912670e265b311785 | 9,930 | class Certbot < Formula
include Language::Python::Virtualenv
desc "Tool to obtain certs from Let's Encrypt and autoenable HTTPS"
homepage "https://certbot.eff.org/"
url "https://github.com/certbot/certbot/archive/v1.11.0.tar.gz"
sha256 "a5b170f81bc5aab507a480006218bd4d256e0ae7cb43c37ba46018c00299af91"
license "Apache-2.0"
head "https://github.com/certbot/certbot.git"
bottle do
sha256 "dbe0a755e36de20f8512233a7b6b55790695594f6f577ca7b5ce6730e17a04a2" => :big_sur
sha256 "5333489bcfe120e0c930ecfd8e6c25d4d51189c5a6fcaadf04da7c96c99497f9" => :arm64_big_sur
sha256 "0ea6b82bbe72e5835f38ff2c5ea01b03c67ff8a55617e78c9aca0aa6cb4d7ecf" => :catalina
sha256 "3e7936d9f31832d762e9496387293e8b7e67a0ee2da13552bb005372c5f29402" => :mojave
sha256 "a7ae57127f1ad51d43726ad954c4ff35d877044938e99c92522bd643b3f31cea" => :x86_64_linux
end
depends_on "augeas"
depends_on "dialog"
depends_on "[email protected]"
depends_on "[email protected]"
uses_from_macos "libffi"
on_linux do
depends_on "pkg-config" => :build
# https://github.com/pypa/setuptools/issues/2017#issuecomment-605354361
resource "setuptools" do
url "https://files.pythonhosted.org/packages/fd/76/3c7f726ed5c582019937f178d7478ce62716b7e8263344f1684cbe11ab3e/setuptools-45.0.0.zip"
sha256 "c46d9c8f2289535457d36c676b541ca78f7dcb736b97d02f50d17f7f15b583cc"
end
end
resource "acme" do
url "https://files.pythonhosted.org/packages/86/9a/3e275783e1ed2cf05e2347ec8c8c38cec612600abe9c28f12f2a2152e55c/acme-1.10.1.tar.gz"
sha256 "fcbb559aedc96b404edf593e78517dcd7291984d5a37036c3fc77f3c5c122fd8"
end
resource "certifi" do
url "https://files.pythonhosted.org/packages/06/a9/cd1fd8ee13f73a4d4f491ee219deeeae20afefa914dfb4c130cfc9dc397a/certifi-2020.12.5.tar.gz"
sha256 "1a4995114262bffbc2413b159f2a1a480c969de6e6eb13ee966d470af86af59c"
end
resource "cffi" do
url "https://files.pythonhosted.org/packages/66/6a/98e023b3d11537a5521902ac6b50db470c826c682be6a8c661549cb7717a/cffi-1.14.4.tar.gz"
sha256 "1a465cbe98a7fd391d47dce4b8f7e5b921e6cd805ef421d04f5f66ba8f06086c"
end
resource "chardet" do
url "https://files.pythonhosted.org/packages/fc/bb/a5768c230f9ddb03acc9ef3f0d4a3cf93462473795d18e9535498c8f929d/chardet-3.0.4.tar.gz"
sha256 "84ab92ed1c4d4f16916e05906b6b75a6c0fb5db821cc65e70cbd64a3e2a5eaae"
end
resource "ConfigArgParse" do
url "https://files.pythonhosted.org/packages/bb/79/3045743bb26ca2e44a1d317c37395462bfed82dbbd38e69a3280b63696ce/ConfigArgParse-1.2.3.tar.gz"
sha256 "edd17be986d5c1ba2e307150b8e5f5107aba125f3574dddd02c85d5cdcfd37dc"
end
resource "configobj" do
url "https://files.pythonhosted.org/packages/64/61/079eb60459c44929e684fa7d9e2fdca403f67d64dd9dbac27296be2e0fab/configobj-5.0.6.tar.gz"
sha256 "a2f5650770e1c87fb335af19a9b7eb73fc05ccf22144eb68db7d00cd2bcb0902"
end
resource "cryptography" do
url "https://files.pythonhosted.org/packages/94/5c/42de91c7fbdb817b2d9a4e64b067946eb38a4eb36c1a09c96c87a0f86a82/cryptography-3.2.1.tar.gz"
sha256 "d3d5e10be0cf2a12214ddee45c6bd203dab435e3d83b4560c03066eda600bfe3"
end
resource "distro" do
url "https://files.pythonhosted.org/packages/a6/a4/75064c334d8ae433445a20816b788700db1651f21bdb0af33db2aab142fe/distro-1.5.0.tar.gz"
sha256 "0e58756ae38fbd8fc3020d54badb8eae17c5b9dcbed388b17bb55b8a5928df92"
end
resource "idna" do
url "https://files.pythonhosted.org/packages/ea/b7/e0e3c1c467636186c39925827be42f16fee389dc404ac29e930e9136be70/idna-2.10.tar.gz"
sha256 "b307872f855b18632ce0c21c5e45be78c0ea7ae4c15c828c20788b26921eb3f6"
end
resource "josepy" do
url "https://files.pythonhosted.org/packages/18/ae/3b8a55f257adb1f0f8657e14d9e7da5eacf0dc088ef0050103b3ae408029/josepy-1.5.0.tar.gz"
sha256 "502a36f86efe2a6d09bf7018bca9fd8f8f24d8090a966aa037dbc844459ff9c8"
end
resource "parsedatetime" do
url "https://files.pythonhosted.org/packages/a8/20/cb587f6672dbe585d101f590c3871d16e7aec5a576a1694997a3777312ac/parsedatetime-2.6.tar.gz"
sha256 "4cb368fbb18a0b7231f4d76119165451c8d2e35951455dfee97c62a87b04d455"
end
resource "pycparser" do
url "https://files.pythonhosted.org/packages/0f/86/e19659527668d70be91d0369aeaa055b4eb396b0f387a4f92293a20035bd/pycparser-2.20.tar.gz"
sha256 "2d475327684562c3a96cc71adf7dc8c4f0565175cf86b6d7a404ff4c771f15f0"
end
resource "pyOpenSSL" do
url "https://files.pythonhosted.org/packages/09/67/38a1080fabbb908515c47694238e00b6a73a82d9dbc76372263a231abfcb/pyOpenSSL-20.0.0.tar.gz"
sha256 "92f08eccbd73701cf744e8ffd6989aa7842d48cbe3fea8a7c031c5647f590ac5"
end
resource "pyparsing" do
url "https://files.pythonhosted.org/packages/c1/47/dfc9c342c9842bbe0036c7f763d2d6686bcf5eb1808ba3e170afdb282210/pyparsing-2.4.7.tar.gz"
sha256 "c203ec8783bf771a155b207279b9bccb8dea02d8f0c9e5f8ead507bc3246ecc1"
end
resource "pyRFC3339" do
url "https://files.pythonhosted.org/packages/00/52/75ea0ae249ba885c9429e421b4f94bc154df68484847f1ac164287d978d7/pyRFC3339-1.1.tar.gz"
sha256 "81b8cbe1519cdb79bed04910dd6fa4e181faf8c88dff1e1b987b5f7ab23a5b1a"
end
resource "python-augeas" do
url "https://files.pythonhosted.org/packages/af/cc/5064a3c25721cd863e6982b87f10fdd91d8bcc62b6f7f36f5231f20d6376/python-augeas-1.1.0.tar.gz"
sha256 "5194a49e86b40ffc57055f73d833f87e39dce6fce934683e7d0d5bbb8eff3b8c"
end
resource "pytz" do
url "https://files.pythonhosted.org/packages/09/07/448a8887c7195450604dfc0305d80d74324c36ee18ed997664051d4bffe3/pytz-2020.4.tar.gz"
sha256 "3e6b7dd2d1e0a59084bcee14a17af60c5c562cdc16d828e8eba2e683d3a7e268"
end
resource "requests" do
url "https://files.pythonhosted.org/packages/9f/14/4a6542a078773957aa83101336375c9597e6fe5889d20abda9c38f9f3ff2/requests-2.25.0.tar.gz"
sha256 "7f1a0b932f4a60a1a65caa4263921bb7d9ee911957e0ae4a23a6dd08185ad5f8"
end
resource "requests-toolbelt" do
url "https://files.pythonhosted.org/packages/28/30/7bf7e5071081f761766d46820e52f4b16c8a08fef02d2eb4682ca7534310/requests-toolbelt-0.9.1.tar.gz"
sha256 "968089d4584ad4ad7c171454f0a5c6dac23971e9472521ea3b6d49d610aa6fc0"
end
resource "six" do
url "https://files.pythonhosted.org/packages/6b/34/415834bfdafca3c5f451532e8a8d9ba89a21c9743a0c59fbd0205c7f9426/six-1.15.0.tar.gz"
sha256 "30639c035cdb23534cd4aa2dd52c3bf48f06e5f4a941509c8bafd8ce11080259"
end
resource "urllib3" do
url "https://files.pythonhosted.org/packages/29/e6/d1a1d78c439cad688757b70f26c50a53332167c364edb0134cadd280e234/urllib3-1.26.2.tar.gz"
sha256 "19188f96923873c92ccb987120ec4acaa12f0461fa9ce5d3d0772bc965a39e08"
end
resource "zope.component" do
url "https://files.pythonhosted.org/packages/5d/e7/14935f56295e6a9a4ebcc82e30403f4bf580760801ea7ac6bae61ef0267b/zope.component-4.6.2.tar.gz"
sha256 "91628918218b3e6f6323de2a7b845e09ddc5cae131c034896c051b084bba3c92"
end
resource "zope.deferredimport" do
url "https://files.pythonhosted.org/packages/b9/74/6eb2dcf013fac35d086abef2435b5a6621435c2b0c166ef5b63a1b51e91d/zope.deferredimport-4.3.1.tar.gz"
sha256 "57b2345e7b5eef47efcd4f634ff16c93e4265de3dcf325afc7315ade48d909e1"
end
resource "zope.deprecation" do
url "https://files.pythonhosted.org/packages/34/da/46e92d32d545dd067b9436279d84c339e8b16de2ca393d7b892bc1e1e9fd/zope.deprecation-4.4.0.tar.gz"
sha256 "0d453338f04bacf91bbfba545d8bcdf529aa829e67b705eac8c1a7fdce66e2df"
end
resource "zope.event" do
url "https://files.pythonhosted.org/packages/30/00/94ed30bfec18edbabfcbd503fcf7482c5031b0fbbc9bc361f046cb79781c/zope.event-4.5.0.tar.gz"
sha256 "5e76517f5b9b119acf37ca8819781db6c16ea433f7e2062c4afc2b6fbedb1330"
end
resource "zope.hookable" do
url "https://files.pythonhosted.org/packages/31/ce/4d1cd6d2a3d980989ceaf86abb6c8683f90f01a81861f3de6ec7ae317db7/zope.hookable-5.0.1.tar.gz"
sha256 "29d07681a78042cdd15b268ae9decffed9ace68a53eebeb61d65ae931d158841"
end
resource "zope.interface" do
url "https://files.pythonhosted.org/packages/84/21/80cdc749908ebf2719a9063eddcc02b668fbc62d200c1f1a4d92aaaba76b/zope.interface-5.2.0.tar.gz"
sha256 "8251f06a77985a2729a8bdbefbae79ee78567dddc3acbd499b87e705ca59fe24"
end
resource "zope.proxy" do
url "https://files.pythonhosted.org/packages/ab/37/26899cb231ecfa04822a17a669eac6df7ef0c2a86e2b78001db0cd3edd46/zope.proxy-4.3.5.tar.gz"
sha256 "a66a0d94e5b081d5d695e66d6667e91e74d79e273eee95c1747717ba9cb70792"
end
def install
xy = Language::Python.major_minor_version "python3"
ENV.prepend_create_path "PYTHONPATH", libexec/"vendor/lib/python#{xy}/site-packages"
resources.each do |r|
r.stage do
system "python3", *Language::Python.setup_install_args(libexec/"vendor")
end
end
# Install in our prefix, not the first-in-the-path python site-packages dir.
ENV.prepend_create_path "PYTHONPATH", libexec/"lib/python#{xy}/site-packages"
cd buildpath/"certbot" do
system "python3", *Language::Python.setup_install_args(libexec)
end
# Shipped with certbot, not external resources.
%w[acme certbot-apache certbot-nginx].each do |r|
cd buildpath/r do
system "python3", *Language::Python.setup_install_args(libexec/"vendor")
end
end
pkgshare.install buildpath/"certbot/examples"
bin.install Dir[libexec/"bin/certbot"]
bin.env_script_all_files(libexec/"bin", PYTHONPATH: ENV["PYTHONPATH"])
end
test do
assert_match version.to_s, pipe_output("#{bin}/certbot --version 2>&1")
# This throws a bad exit code but we can check it actually is failing
# for the right reasons by asserting. --version never fails even if
# resources are missing or outdated/too new/etc.
if Process.uid.zero?
assert_match "Saving debug log", shell_output("#{bin}/certbot 2>&1", 1)
else
assert_match "Either run as root", shell_output("#{bin}/certbot 2>&1", 1)
end
end
end
| 45.972222 | 149 | 0.80574 |
2840435348bb352012582ddb630c2824a4f4b36d | 185 | def login_as(user)
visit '/users/sign_in'
within("#new_account") do
fill_in 'Email', with: user.email
fill_in 'Password', with: 'password'
end
click_button 'Log in'
end
| 20.555556 | 40 | 0.681081 |
6a8a18dfd6c1f187865fa96acc78bc8d1afb4530 | 611 | module HomePageSteps
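# English: "we opened the home page"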
step 'ми відкрили початкову сторінку' do
visit '/'
end
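# English: "in the navigation menu I want to see links to the sections:"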
step 'в навігаційному меню я хочу бачити посилання на розділи:' do |categories|
categories.each do |category|
within('.main-menu') do
expect(page).to have_link(category[0])
end
end
end
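# English: "we should see the number of items in the corresponding section"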
step 'ми маємо бачити кількість елементів у відповідному розділі' do
Category.all.each do |category|
within('.main-menu') do
if category.count > 0
expect(page).to have_content category.count.to_s
end
end
end
end
end
RSpec.configure { |c| c.include HomePageSteps }
| 21.821429 | 81 | 0.653028 |
b9d6948359d9d2ff7075de2370b194ca9da671e2 | 617 | class ResultGroup < AactRecord
include BelongsToStudy
has_many :reported_events, dependent: :restrict_with_exception
has_many :milestones, dependent: :restrict_with_exception
has_many :drop_withdrawals, dependent: :restrict_with_exception
has_many :baseline_measures, dependent: :restrict_with_exception
has_many :outcome_counts, dependent: :restrict_with_exception
has_many :outcome_measurements, dependent: :restrict_with_exception
has_many :outcome_analysis_groups, inverse_of: :result_group, dependent: :restrict_with_exception
has_many :outcome_analyses, through: :outcome_analysis_groups
end
| 47.461538 | 99 | 0.844408 |
aba68d6db2ff543fa52c5f44fda0e537934d58d0 | 15,609 | require "fileutils"
require "digest/md5"
require "active_support/core_ext/string/strip"
require "rails/version" unless defined?(Rails::VERSION)
require "open-uri"
require "uri"
require "rails/generators"
require "active_support/core_ext/array/extract_options"
module Rails
module Generators
class AppBase < Base # :nodoc:
DATABASES = %w( mysql postgresql sqlite3 oracle frontbase ibm_db sqlserver )
JDBC_DATABASES = %w( jdbcmysql jdbcsqlite3 jdbcpostgresql jdbc )
DATABASES.concat(JDBC_DATABASES)
attr_accessor :rails_template
add_shebang_option!
argument :app_path, type: :string
def self.strict_args_position
false
end
def self.add_shared_options_for(name)
class_option :template, type: :string, aliases: "-m",
desc: "Path to some #{name} template (can be a filesystem path or URL)"
class_option :database, type: :string, aliases: "-d", default: "sqlite3",
desc: "Preconfigure for selected database (options: #{DATABASES.join('/')})"
class_option :javascript, type: :string, aliases: "-j",
desc: "Preconfigure for selected JavaScript library"
class_option :skip_yarn, type: :boolean, default: false,
desc: "Don't use Yarn for managing JavaScript dependencies"
class_option :skip_gemfile, type: :boolean, default: false,
desc: "Don't create a Gemfile"
class_option :skip_bundle, type: :boolean, aliases: "-B", default: false,
desc: "Don't run bundle install"
class_option :skip_git, type: :boolean, aliases: "-G", default: false,
desc: "Skip .gitignore file"
class_option :skip_keeps, type: :boolean, default: false,
desc: "Skip source control .keep files"
class_option :skip_action_mailer, type: :boolean, aliases: "-M",
default: false,
desc: "Skip Action Mailer files"
class_option :skip_active_record, type: :boolean, aliases: "-O", default: false,
desc: "Skip Active Record files"
class_option :skip_puma, type: :boolean, aliases: "-P", default: false,
desc: "Skip Puma related files"
class_option :skip_action_cable, type: :boolean, aliases: "-C", default: false,
desc: "Skip Action Cable files"
class_option :skip_sprockets, type: :boolean, aliases: "-S", default: false,
desc: "Skip Sprockets files"
class_option :skip_spring, type: :boolean, default: false,
desc: "Don't install Spring application preloader"
class_option :skip_listen, type: :boolean, default: false,
desc: "Don't generate configuration that depends on the listen gem"
class_option :skip_coffee, type: :boolean, default: false,
desc: "Don't use CoffeeScript"
class_option :skip_javascript, type: :boolean, aliases: "-J", default: false,
desc: "Skip JavaScript files"
class_option :skip_turbolinks, type: :boolean, default: false,
desc: "Skip turbolinks gem"
class_option :skip_test, type: :boolean, aliases: "-T", default: false,
desc: "Skip test files"
class_option :dev, type: :boolean, default: false,
desc: "Setup the #{name} with Gemfile pointing to your Rails checkout"
class_option :edge, type: :boolean, default: false,
desc: "Setup the #{name} with Gemfile pointing to Rails repository"
class_option :rc, type: :string, default: nil,
desc: "Path to file containing extra configuration options for rails command"
class_option :no_rc, type: :boolean, default: false,
desc: "Skip loading of extra configuration options from .railsrc file"
class_option :help, type: :boolean, aliases: "-h", group: :rails,
desc: "Show this help message and quit"
end
def initialize(*args)
@gem_filter = lambda { |gem| true }
@extra_entries = []
super
convert_database_option_for_jruby
end
protected
def gemfile_entry(name, *args)
options = args.extract_options!
version = args.first
github = options[:github]
path = options[:path]
if github
@extra_entries << GemfileEntry.github(name, github)
elsif path
@extra_entries << GemfileEntry.path(name, path)
else
@extra_entries << GemfileEntry.version(name, version)
end
self
end
def gemfile_entries
[rails_gemfile_entry,
database_gemfile_entry,
webserver_gemfile_entry,
assets_gemfile_entry,
javascript_gemfile_entry,
jbuilder_gemfile_entry,
psych_gemfile_entry,
cable_gemfile_entry,
@extra_entries].flatten.find_all(&@gem_filter)
end
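# Chains the given block in front of the existing filter; a gem entry is kept only if every registered filter accepts it.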
def add_gem_entry_filter
@gem_filter = lambda { |next_filter, entry|
yield(entry) && next_filter.call(entry)
}.curry[@gem_filter]
end
def builder
@builder ||= begin
builder_class = get_builder_class
builder_class.include(ActionMethods)
builder_class.new(self)
end
end
def build(meth, *args)
builder.send(meth, *args) if builder.respond_to?(meth)
end
def create_root
valid_const?
empty_directory "."
FileUtils.cd(destination_root) unless options[:pretend]
end
def apply_rails_template
apply rails_template if rails_template
rescue Thor::Error, LoadError, Errno::ENOENT => e
raise Error, "The template [#{rails_template}] could not be loaded. Error: #{e}"
end
def set_default_accessors!
self.destination_root = File.expand_path(app_path, destination_root)
self.rails_template = \
case options[:template]
when /^https?:\/\//
options[:template]
when String
File.expand_path(options[:template], Dir.pwd)
else
options[:template]
end
end
def database_gemfile_entry
return [] if options[:skip_active_record]
gem_name, gem_version = gem_for_database
GemfileEntry.version gem_name, gem_version,
"Use #{options[:database]} as the database for Active Record"
end
def webserver_gemfile_entry
return [] if options[:skip_puma]
comment = "Use Puma as the app server"
GemfileEntry.new("puma", "~> 3.0", comment)
end
def include_all_railties?
options.values_at(:skip_active_record, :skip_action_mailer, :skip_test, :skip_sprockets, :skip_action_cable).none?
end
def comment_if(value)
options[value] ? "# " : ""
end
def keeps?
!options[:skip_keeps]
end
def sqlite3?
!options[:skip_active_record] && options[:database] == "sqlite3"
end
class GemfileEntry < Struct.new(:name, :version, :comment, :options, :commented_out)
def initialize(name, version, comment, options = {}, commented_out = false)
super
end
def self.github(name, github, branch = nil, comment = nil)
if branch
new(name, nil, comment, github: github, branch: branch)
else
new(name, nil, comment, github: github)
end
end
def self.version(name, version, comment = nil)
new(name, version, comment)
end
def self.path(name, path, comment = nil)
new(name, nil, comment, path: path)
end
def version
version = super
if version.is_a?(Array)
version.join("', '")
else
version
end
end
end
def rails_gemfile_entry
dev_edge_common = [
]
if options.dev?
[
GemfileEntry.path("rails", Rails::Generators::RAILS_DEV_PATH)
] + dev_edge_common
elsif options.edge?
[
GemfileEntry.github("rails", "rails/rails")
] + dev_edge_common
else
[GemfileEntry.version("rails",
rails_version_specifier,
"Bundle edge Rails instead: gem 'rails', github: 'rails/rails'")]
end
end
def rails_version_specifier(gem_version = Rails.gem_version)
if gem_version.prerelease?
next_series = gem_version
next_series = next_series.bump while next_series.segments.size > 2
[">= #{gem_version}", "< #{next_series}"]
elsif gem_version.segments.size == 3
"~> #{gem_version}"
else
patch = gem_version.segments[0, 3].join(".")
["~> #{patch}", ">= #{gem_version}"]
end
end
def gem_for_database
# %w( mysql postgresql sqlite3 oracle frontbase ibm_db sqlserver jdbcmysql jdbcsqlite3 jdbcpostgresql )
case options[:database]
when "mysql" then ["mysql2", [">= 0.3.18", "< 0.5"]]
when "postgresql" then ["pg", ["~> 0.18"]]
when "oracle" then ["ruby-oci8", nil]
when "frontbase" then ["ruby-frontbase", nil]
when "sqlserver" then ["activerecord-sqlserver-adapter", nil]
when "jdbcmysql" then ["activerecord-jdbcmysql-adapter", nil]
when "jdbcsqlite3" then ["activerecord-jdbcsqlite3-adapter", nil]
when "jdbcpostgresql" then ["activerecord-jdbcpostgresql-adapter", nil]
when "jdbc" then ["activerecord-jdbc-adapter", nil]
else [options[:database], nil]
end
end
def convert_database_option_for_jruby
if defined?(JRUBY_VERSION)
case options[:database]
when "postgresql" then options[:database].replace "jdbcpostgresql"
when "mysql" then options[:database].replace "jdbcmysql"
when "oracle" then options[:database].replace "jdbc"
when "sqlite3" then options[:database].replace "jdbcsqlite3"
end
end
end
def assets_gemfile_entry
return [] if options[:skip_sprockets]
gems = []
gems << GemfileEntry.github("sass-rails", "rails/sass-rails", nil,
"Use SCSS for stylesheets")
if !options[:skip_javascript]
gems << GemfileEntry.version("uglifier",
">= 1.3.0",
"Use Uglifier as compressor for JavaScript assets")
end
gems
end
def jbuilder_gemfile_entry
comment = "Build JSON APIs with ease. Read more: https://github.com/rails/jbuilder"
GemfileEntry.new "jbuilder", "~> 2.5", comment, {}, options[:api]
end
def coffee_gemfile_entry
GemfileEntry.version "coffee-rails", "~> 4.2", "Use CoffeeScript for .coffee assets and views"
end
def javascript_gemfile_entry
if options[:skip_javascript] || options[:skip_sprockets]
[]
else
gems = [javascript_runtime_gemfile_entry]
gems << coffee_gemfile_entry unless options[:skip_coffee]
if options[:javascript]
gems << GemfileEntry.version("#{options[:javascript]}-rails", nil,
"Use #{options[:javascript]} as the JavaScript library")
end
unless options[:skip_turbolinks]
gems << GemfileEntry.version("turbolinks", "~> 5",
"Turbolinks makes navigating your web application faster. Read more: https://github.com/turbolinks/turbolinks")
end
gems
end
end
def javascript_runtime_gemfile_entry
comment = "See https://github.com/rails/execjs#readme for more supported runtimes"
if defined?(JRUBY_VERSION)
GemfileEntry.version "therubyrhino", nil, comment
else
GemfileEntry.new "therubyracer", nil, comment, { platforms: :ruby }, true
end
end
def psych_gemfile_entry
return [] unless defined?(Rubinius)
comment = "Use Psych as the YAML engine, instead of Syck, so serialized " \
"data can be read safely from different rubies (see http://git.io/uuLVag)"
GemfileEntry.new("psych", "~> 2.0", comment, platforms: :rbx)
end
def cable_gemfile_entry
return [] if options[:skip_action_cable]
comment = "Use Redis adapter to run Action Cable in production"
gems = []
gems << GemfileEntry.new("redis", "~> 3.0", comment, {}, true)
gems
end
def bundle_command(command)
say_status :run, "bundle #{command}"
# We are going to shell out rather than invoking Bundler::CLI.new(command)
# because `rails new` loads the Thor gem and on the other hand bundler uses
# its own vendored Thor, which could be a different version. Running both
# things in the same process is a recipe for a night with paracetamol.
#
# We unset temporary bundler variables to load proper bundler and Gemfile.
#
# Thanks to James Tucker for the Gem tricks involved in this call.
_bundle_command = Gem.bin_path("bundler", "bundle")
require "bundler"
Bundler.with_clean_env do
full_command = %Q["#{Gem.ruby}" "#{_bundle_command}" #{command}]
if options[:quiet]
system(full_command, out: File::NULL)
else
system(full_command)
end
end
end
def bundle_install?
!(options[:skip_gemfile] || options[:skip_bundle] || options[:pretend])
end
def spring_install?
!options[:skip_spring] && !options.dev? && Process.respond_to?(:fork) && !RUBY_PLATFORM.include?("cygwin")
end
def depend_on_listen?
!options[:skip_listen] && os_supports_listen_out_of_the_box?
end
def os_supports_listen_out_of_the_box?
RbConfig::CONFIG["host_os"] =~ /darwin|linux/
end
def run_bundle
bundle_command("install") if bundle_install?
end
def generate_spring_binstubs
if bundle_install? && spring_install?
bundle_command("exec spring binstub --all")
end
end
def empty_directory_with_keep_file(destination, config = {})
empty_directory(destination, config)
keep_file(destination)
end
def keep_file(destination)
create_file("#{destination}/.keep") if keeps?
end
end
end
end
| 35.965438 | 124 | 0.568518 |
6a545d73b9cb53f2f896cc7f415caa2e6ebfcce0 | 496 | # frozen_string_literal: true
module ResourceRegistry
module Operations
module Features
# Enable a Feature
# @param [Symbol] name Name of feature to enable
# @param [Hash] options Options passed through to feature enable call
# @return result of the feature instance enable call
class Enable
send(:include, Dry::Monads[:result, :do])
def call(name:, options: {})
feature(name).enable(options)
end
end
end
end
end
| 23.619048 | 75 | 0.641129 |
bfeaeb1c441da0a7fdb85530f77bb39b1f45b036 | 2,082 | module Textpow
class ScoreManager
POINT_DEPTH = 4
NESTING_DEPTH = 40
START_VALUE = 2 ** ( POINT_DEPTH * NESTING_DEPTH )
BASE = 2 ** POINT_DEPTH
def initialize
@scores = {}
end
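# Returns the best score of +reference_scope+ against the comma-separated selector +search_scope+; within a selector, terms joined by '-' after the first act as exclusions.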
def score search_scope, reference_scope
max = 0
search_scope.split( ',' ).each do |scope|
arrays = scope.split(/\B-/)
if arrays.size == 1
max = [max, score_term( arrays[0], reference_scope )].max
elsif arrays.size > 1
excluded = false
arrays[1..-1].each do |a|
if score_term( a, reference_scope ) > 0
excluded = true
break
end
end
max = [max, score_term( arrays[0], reference_scope )].max unless excluded
else
raise ParsingError, "Error in scope string: '#{search_scope}' #{arrays.size} is not a valid number of operands" if arrays.size < 1
end
end
max
end
private
def score_term search_scope, reference_scope
unless @scores[reference_scope] && @scores[reference_scope][search_scope]
@scores[reference_scope] ||= {}
@scores[reference_scope][search_scope] = score_array( search_scope.split(' '), reference_scope.split( ' ' ) )
end
@scores[reference_scope][search_scope]
end
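# Walks the reference scopes from the innermost outward: matches nearer the innermost scope carry exponentially more weight, and the score is zero unless every selector term matches.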
def score_array search_array, reference_array
pending = search_array
current = reference_array.last
reg = Regexp.new( "^#{Regexp.escape( pending.last )}" )
multiplier = START_VALUE
result = 0
while pending.size > 0 && current
if reg =~ current
point_score = (2**POINT_DEPTH) - current.count( '.' ) + Regexp.last_match[0].count( '.' )
result += point_score * multiplier
pending.pop
reg = Regexp.new( "^#{Regexp.escape( pending.last )}" ) if pending.size > 0
end
multiplier = multiplier / BASE
reference_array.pop
current = reference_array.last
end
result = 0 if pending.size > 0
result
end
end
end
| 31.545455 | 140 | 0.588377 |
6ab229c49c30fe9284a03547a1c462292dcc5e8c | 1,835 | #
# Copyright 2021- Kentaro Hayashi
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
require "cmetrics"
require "fluent/plugin/input"
require "fluent/plugin/node_exporter/collector"
module Fluent
module Plugin
module NodeExporter
class VmstatMetricsCollector < MetricsCollector
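# Exports only the OOM-kill, paging, swap and page-fault counters from /proc/vmstat.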
VMSTAT_ENTRIES_REGEX = /^(oom_kill|pgpg|pswp|pg.*fault).*/
def initialize(config={})
super(config)
@metrics = {}
vmstat_path = File.join(@procfs_path, "vmstat")
File.readlines(vmstat_path).each do |line|
if VMSTAT_ENTRIES_REGEX.match?(line)
key, _ = line.split(' ', 2)
@untyped = CMetrics::Untyped.new
@untyped.create("node", "vmstat", key, "#{vmstat_path} information field #{key}.")
@metrics[key.intern] = @untyped
end
end
end
def run
vmstat_update
end
def vmstat_update
vmstat_path = File.join(@procfs_path, "vmstat")
File.readlines(vmstat_path).each do |line|
if VMSTAT_ENTRIES_REGEX.match?(line)
key, value = line.split(' ', 2)
@metrics[key.intern].set(value.to_f)
end
end
end
def cmetrics
@metrics
end
end
end
end
end
| 29.126984 | 96 | 0.622888 |
ffb646f56f45534f0af744e4695b7580ec0a8c85 | 2,414 | require 'rails_helper'
RSpec.describe Event, type: :model do
describe 'Relations' do
it { is_expected.to belong_to(:item) }
it { is_expected.to embed_many(:dispatches) }
end
describe 'Fields' do
it { is_expected.to have_timestamps }
it { is_expected.to have_field(:item_id).of_type(Object) }
it { is_expected.to have_field(:event_type_cd).of_type(String) }
it { is_expected.to have_field(:message).of_type(String) }
it { is_expected.to have_field(:data).of_type(Hash) }
it { is_expected.to have_field(:webhook_data).of_type(Hash) }
end
describe 'Indexes' do
it { is_expected.to have_index_for(item_id: 1) }
it { is_expected.to have_index_for(event_type_cd: 1) }
it { is_expected.to have_index_for(created_at: -1) }
end
describe 'Configurations' do
it { is_expected.to be_mongoid_document }
it 'has event_type enum' do
enum = {
'item_added' => 'item_added',
'item_changed' => 'item_changed'
}
expect(described_class.event_types.hash).to eq(enum)
end
end
describe 'Validations' do
it { is_expected.to validate_presence_of(:item_id) }
it { is_expected.to validate_presence_of(:event_type_cd) }
it { is_expected.to validate_presence_of(:data) }
it { is_expected.to validate_presence_of(:message) }
end
describe 'Callbacks' do
describe 'Before validation' do
it 'fills message' do
event = build(:event)
message = Faker::Lorem.word
allow(BuildEventMessage).to receive(:run!).with(event: event).and_return(message)
event.validate
expect(event.message).to eq message
end
it 'fills webhook_data' do
event = build(:event)
webhook_data = Hash[*Faker::Lorem.words(number: 4)]
allow(BuildEventWebhookData).to receive(:run!).with(event: event).and_return(webhook_data)
event.validate
expect(event.webhook_data).to eq webhook_data
end
end
describe 'After create' do
it 'creates a twitter pending dispatches' do
event = build(:event)
expect { event.save }.to change(event.dispatches, :count).by(2)
expect(event.dispatches.first).to be_pending
expect(event.dispatches.first.target).to eq :twitter
expect(event.dispatches.last).to be_pending
expect(event.dispatches.last.target).to eq :discord
end
end
end
end
| 31.350649 | 98 | 0.673571 |
3355600b921da0c41df7796c8d3352da54cdb733 | 3,032 | # encoding: utf-8
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
module Azure::Network::Mgmt::V2020_03_01
module Models
#
# An array of private link service id that can be linked to a private end
# point with auto approved.
#
class AutoApprovedPrivateLinkServicesResult
include MsRestAzure
include MsRest::JSONable
# @return [Array<AutoApprovedPrivateLinkService>] An array of auto
# approved private link service.
attr_accessor :value
# @return [String] The URL to get the next set of results.
attr_accessor :next_link
# return [Proc] with next page method call.
attr_accessor :next_method
#
# Gets the rest of the items for the request, enabling auto-pagination.
#
# @return [Array<AutoApprovedPrivateLinkService>] operation results.
#
def get_all_items
items = @value
page = self
while page.next_link != nil && !page.next_link.strip.empty? do
page = page.get_next_page
items.concat(page.value)
end
items
end
#
# Gets the next page of results.
#
# @return [AutoApprovedPrivateLinkServicesResult] with next page content.
#
def get_next_page
response = @next_method.call(@next_link).value! unless @next_method.nil?
unless response.nil?
@next_link = response.body.next_link
@value = response.body.value
self
end
end
#
# Mapper for AutoApprovedPrivateLinkServicesResult class as Ruby Hash.
# This will be used for serialization/deserialization.
#
def self.mapper()
{
client_side_validation: true,
required: false,
serialized_name: 'AutoApprovedPrivateLinkServicesResult',
type: {
name: 'Composite',
class_name: 'AutoApprovedPrivateLinkServicesResult',
model_properties: {
value: {
client_side_validation: true,
required: false,
serialized_name: 'value',
type: {
name: 'Sequence',
element: {
client_side_validation: true,
required: false,
serialized_name: 'AutoApprovedPrivateLinkServiceElementType',
type: {
name: 'Composite',
class_name: 'AutoApprovedPrivateLinkService'
}
}
}
},
next_link: {
client_side_validation: true,
required: false,
read_only: true,
serialized_name: 'nextLink',
type: {
name: 'String'
}
}
}
}
}
end
end
end
end
| 29.72549 | 83 | 0.550462 |
38209ad2d3e765341c5cd8efebc180684345d789 | 951 | require 'zoho_invoice/base'
module ZohoInvoice
class Contact < Base
READ_ATTRIBUTES = [
:contact_id,
:contact_name,
:company_name,
:website,
:currency_id,
:first_name,
:last_name,
:address,
:email,
:phone,
:mobile,
:contact_type,
:billing_address,
:shipping_address,
:contact_persons,
:notes,
:created_time,
:last_modified_time,
:primary_contact_id,
:payment_terms,
:payment_terms_label,
:status,
:custom_fields
]
CREATE_UPDATE_ATTRIBUTES = READ_ATTRIBUTES - [:contact_id]
define_object_attrs(*READ_ATTRIBUTES)
has_many :custom_fields
def self.all(client)
retrieve(client, '/api/v3/contacts')
end
def self.find(client, id, options={})
retrieve(client, "/api/v3/contacts/#{id}", false)
end
end
end
| 19.408163 | 62 | 0.574132 |
b9836258b5f3caf02b4bb0b2b3f4245d1e1ca84e | 1,444 | require 'mini_magick'
module SliceRename
class Slicer
def self.slice_image(config)
extension = File.extname config.path
name = File.basename config.path, extension
path = File.dirname config.path
i = 0
for y in 0..(config.rows - 1)
for x in 0..(config.columns - 1)
# Not sure why we need to reload the image each time.
image = open_image config.path
unless config.suffixes[i].nil?
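# Offset each tile by its own size plus the padding around it; with collapse_padding, adjacent tiles share a single padding strip instead of two.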
pos_x = (x * config.width) + (x * config.padding) + ((x + 1) * config.padding)
pos_y = (y * config.height) + (y * config.padding) + ((y + 1) * config.padding)
if config.collapse_padding
pos_x -= (x * config.padding)
pos_y -= (y * config.padding)
end
crop = "#{config.width}x#{config.height}+#{pos_x}+#{pos_y}"
output_name = "#{path}/#{name}#{config.suffixes[i]}#{extension}"
if config.debug
puts "Output: #{output_name}"
puts "Crop: #{crop}"
end
save_slice image, output_name, crop
end
i += 1
end
end
end
private
def self.open_image(path)
MiniMagick::Image.open path
end
def self.save_slice(image, file_name, cropping)
image.combine_options do |c|
c.crop cropping
c.repage.+
end
image.write file_name
end
end
end
| 25.333333 | 91 | 0.546399 |
61d039c4be5e1a36abf1c534068fcb41e865c226 | 1,390 | class Libmicrohttpd < Formula
desc "Light HTTP/1.1 server library"
homepage "https://www.gnu.org/software/libmicrohttpd/"
url "https://ftp.gnu.org/gnu/libmicrohttpd/libmicrohttpd-0.9.62.tar.gz"
mirror "https://ftpmirror.gnu.org/libmicrohttpd/libmicrohttpd-0.9.62.tar.gz"
sha256 "bd3e097d703f5091a6a01b56c0464a90fdd17f5d50478cea50a346b25c88db49"
revision 1
bottle do
cellar :any
sha256 "e2a6d6aba4b8a259cafd558e58d7424e1b2ddbd2afbc6b98bbc2ef3617971cd2" => :mojave
sha256 "d76e6ca726c0067832ee7ca266bb856b8b6a9065c6e94910fbd6aeb1428478c8" => :high_sierra
sha256 "9085e50cae90cfd889f001092af2ed63f26078b9f9683b945a281f00b5fd9317" => :sierra
sha256 "a0cd78b8f4aab6175fbc10c60ddb0275ac3b5e4cc468571d2d905a02711c9162" => :x86_64_linux
end
depends_on "gnutls"
depends_on "libgcrypt"
def install
system "./configure", "--disable-dependency-tracking",
"--disable-silent-rules",
"--prefix=#{prefix}"
system "make", "install"
pkgshare.install "doc/examples"
end
test do
cp pkgshare/"examples/simplepost.c", testpath
inreplace "simplepost.c",
"return 0",
"printf(\"daemon %p\", daemon) ; return 0"
system ENV.cc, "-o", "foo", "simplepost.c", "-I#{include}", "-L#{lib}", "-lmicrohttpd"
assert_match /daemon 0x[0-9a-f]+[1-9a-f]+/, pipe_output("./foo")
end
end
| 37.567568 | 94 | 0.705036 |
333850b6d09af61a26c715ae0d8176cac9483ef8 | 169 | class CreateInterests < ActiveRecord::Migration[6.0]
def change
create_table :interests do |t|
t.string :interest_name
t.timestamps
end
end
end
| 16.9 | 52 | 0.686391 |
794625dd870afa9ad10c6cb023232da33bff43b2 | 3,256 | class Gnupg < Formula
desc "GNU Pretty Good Privacy (PGP) package"
homepage "https://gnupg.org/"
url "https://gnupg.org/ftp/gcrypt/gnupg/gnupg-2.2.10.tar.bz2"
sha256 "799dd37a86a1448732e339bd20440f4f5ee6e69755f6fd7a73ee8af30840c915"
bottle do
sha256 "7190eeef3372dec0d663f29f59fdc192b4f2b684b00b684405a3fd086b3fddcf" => :mojave
sha256 "a6c89bd1575cd29b96454f64f782b321105725a6e35228724f3654403c9a47f4" => :high_sierra
sha256 "2ec723b779f410e4facb11c0523fe3ce1a1b31514228d348857e06ae02d70188" => :sierra
sha256 "92b34de0e0713e1a5179a5c82ee4aec1579d798a6a2e5772db2716f30d791d9b" => :el_capitan
end
option "with-gpgsplit", "Additionally install the gpgsplit utility"
option "with-gpg-zip", "Additionally install the gpg-zip utility"
option "with-large-secmem", "Additionally allocate extra secure memory"
option "without-libusb", "Disable the internal CCID driver"
deprecated_option "without-libusb-compat" => "without-libusb"
depends_on "pkg-config" => :build
depends_on "sqlite" => :build if MacOS.version == :mavericks
depends_on "npth"
depends_on "gnutls"
depends_on "libgpg-error"
depends_on "libgcrypt"
depends_on "libksba"
depends_on "libassuan"
depends_on "pinentry"
depends_on "gettext"
depends_on "adns"
depends_on "libusb" => :recommended
depends_on "readline" => :optional
depends_on "encfs" => :optional
def install
args = %W[
--disable-dependency-tracking
--disable-silent-rules
--prefix=#{prefix}
--sbindir=#{bin}
--sysconfdir=#{etc}
--enable-symcryptrun
--with-pinentry-pgm=#{Formula["pinentry"].opt_bin}/pinentry
--enable-all-tests
]
args << "--disable-ccid-driver" if build.without? "libusb"
args << "--with-readline=#{Formula["readline"].opt_prefix}" if build.with? "readline"
args << "--enable-large-secmem" if build.with? "large-secmem"
system "./configure", *args
system "make"
system "make", "check"
system "make", "install"
bin.install "tools/gpgsplit" if build.with? "gpgsplit"
bin.install "tools/gpg-zip" if build.with? "gpg-zip"
end
def post_install
(var/"run").mkpath
quiet_system "killall", "gpg-agent"
end
def caveats; <<~EOS
Once you run this version of gpg you may find it difficult to return to using
a prior 1.4.x or 2.0.x. Most notably the prior versions will not automatically
know about new secret keys created or imported by this version. We recommend
creating a backup of your `~/.gnupg` prior to first use.
For full details on each change and how it could impact you please see
https://www.gnupg.org/faq/whats-new-in-2.1.html
EOS
end
test do
(testpath/"batch.gpg").write <<~EOS
Key-Type: RSA
Key-Length: 2048
Subkey-Type: RSA
Subkey-Length: 2048
Name-Real: Testing
Name-Email: [email protected]
Expire-Date: 1d
%no-protection
%commit
EOS
begin
system bin/"gpg", "--batch", "--gen-key", "batch.gpg"
(testpath/"test.txt").write "Hello World!"
system bin/"gpg", "--detach-sign", "test.txt"
system bin/"gpg", "--verify", "test.txt.sig"
ensure
system bin/"gpgconf", "--kill", "gpg-agent"
end
end
end
| 32.888889 | 93 | 0.687961 |
bf425a2617c7065339894513510d0911877b7b03 | 7,131 | require 'rails_helper'
describe Clusters::Applications::Knative do
include KubernetesHelpers
include ReactiveCachingHelpers
let(:knative) { create(:clusters_applications_knative) }
include_examples 'cluster application core specs', :clusters_applications_knative
include_examples 'cluster application status specs', :clusters_applications_knative
include_examples 'cluster application helm specs', :clusters_applications_knative
include_examples 'cluster application version specs', :clusters_applications_knative
include_examples 'cluster application initial status specs'
before do
allow(ClusterWaitForIngressIpAddressWorker).to receive(:perform_in)
allow(ClusterWaitForIngressIpAddressWorker).to receive(:perform_async)
end
describe 'when rbac is not enabled' do
let(:cluster) { create(:cluster, :provided_by_gcp, :rbac_disabled) }
let(:knative_no_rbac) { create(:clusters_applications_knative, cluster: cluster) }
it { expect(knative_no_rbac).to be_not_installable }
end
describe 'make_installed with external_ip' do
before do
application.make_installed!
end
let(:application) { create(:clusters_applications_knative, :installing) }
it 'schedules a ClusterWaitForIngressIpAddressWorker' do
expect(ClusterWaitForIngressIpAddressWorker).to have_received(:perform_in)
.with(Clusters::Applications::Knative::FETCH_IP_ADDRESS_DELAY, 'knative', application.id)
end
end
describe '#schedule_status_update with external_ip' do
let(:application) { create(:clusters_applications_knative, :installed) }
before do
application.schedule_status_update
end
it 'schedules a ClusterWaitForIngressIpAddressWorker' do
expect(ClusterWaitForIngressIpAddressWorker).to have_received(:perform_async)
.with('knative', application.id)
end
context 'when the application is not installed' do
let(:application) { create(:clusters_applications_knative, :installing) }
it 'does not schedule a ClusterWaitForIngressIpAddressWorker' do
expect(ClusterWaitForIngressIpAddressWorker).not_to have_received(:perform_async)
end
end
context 'when there is already an external_ip' do
let(:application) { create(:clusters_applications_knative, :installed, external_ip: '111.222.222.111') }
it 'does not schedule a ClusterWaitForIngressIpAddressWorker' do
expect(ClusterWaitForIngressIpAddressWorker).not_to have_received(:perform_in)
end
end
context 'when there is already an external_hostname' do
let(:application) { create(:clusters_applications_knative, :installed, external_hostname: 'localhost.localdomain') }
it 'does not schedule a ClusterWaitForIngressIpAddressWorker' do
expect(ClusterWaitForIngressIpAddressWorker).not_to have_received(:perform_in)
end
end
end
shared_examples 'a command' do
it 'should be an instance of Helm::InstallCommand' do
expect(subject).to be_an_instance_of(Gitlab::Kubernetes::Helm::InstallCommand)
end
it 'should be initialized with knative arguments' do
expect(subject.name).to eq('knative')
expect(subject.chart).to eq('knative/knative')
expect(subject.files).to eq(knative.files)
end
it 'should not install metrics for prometheus' do
expect(subject.postinstall).to be_nil
end
context 'with prometheus installed' do
let(:prometheus) { create(:clusters_applications_prometheus, :installed) }
let(:knative) { create(:clusters_applications_knative, cluster: prometheus.cluster) }
subject { knative.install_command }
it 'should install metrics' do
expect(subject.postinstall).not_to be_nil
expect(subject.postinstall.length).to be(1)
expect(subject.postinstall[0]).to eql("kubectl apply -f #{Clusters::Applications::Knative::METRICS_CONFIG}")
end
end
end
describe '#install_command' do
subject { knative.install_command }
it 'should be initialized with latest version' do
expect(subject.version).to eq('0.2.2')
end
it_behaves_like 'a command'
end
describe '#update_command' do
let!(:current_installed_version) { knative.version = '0.1.0' }
subject { knative.update_command }
it 'should be initialized with current version' do
expect(subject.version).to eq(current_installed_version)
end
it_behaves_like 'a command'
end
describe '#files' do
let(:application) { knative }
let(:values) { subject[:'values.yaml'] }
subject { application.files }
it 'should include knative specific keys in the values.yaml file' do
expect(values).to include('domain')
end
end
describe 'validations' do
it { is_expected.to validate_presence_of(:hostname) }
end
describe '#service_pod_details' do
let(:cluster) { create(:cluster, :project, :provided_by_gcp) }
let(:service) { cluster.platform_kubernetes }
let(:knative) { create(:clusters_applications_knative, cluster: cluster) }
let(:namespace) do
create(:cluster_kubernetes_namespace,
cluster: cluster,
cluster_project: cluster.cluster_project,
project: cluster.cluster_project.project)
end
before do
stub_kubeclient_discover(service.api_url)
stub_kubeclient_knative_services
stub_kubeclient_service_pods
stub_reactive_cache(knative,
{
services: kube_response(kube_knative_services_body),
pods: kube_response(kube_knative_pods_body(cluster.cluster_project.project.name, namespace.namespace))
})
synchronous_reactive_cache(knative)
end
    it 'should be able to use k8s core for pod details' do
expect(knative.service_pod_details(namespace.namespace, cluster.cluster_project.project.name)).not_to be_nil
end
end
describe '#services' do
let(:cluster) { create(:cluster, :project, :provided_by_gcp) }
let(:service) { cluster.platform_kubernetes }
let(:knative) { create(:clusters_applications_knative, cluster: cluster) }
let(:namespace) do
create(:cluster_kubernetes_namespace,
cluster: cluster,
cluster_project: cluster.cluster_project,
project: cluster.cluster_project.project)
end
subject { knative.services }
before do
stub_kubeclient_discover(service.api_url)
stub_kubeclient_knative_services
stub_kubeclient_service_pods
end
    it 'should have an uninitialized cache' do
is_expected.to be_nil
end
context 'when using synchronous reactive cache' do
before do
stub_reactive_cache(knative,
{
services: kube_response(kube_knative_services_body),
pods: kube_response(kube_knative_pods_body(cluster.cluster_project.project.name, namespace.namespace))
})
synchronous_reactive_cache(knative)
end
it 'should have cached services' do
is_expected.not_to be_nil
end
it 'should match our namespace' do
expect(knative.services_for(ns: namespace)).not_to be_nil
end
end
end
end
| 33.167442 | 122 | 0.726406 |
38c7582d5917ffdc28f1b25766dcebb0577e2593 | 51 | module Cobregratis
class Service < Base
end
end | 12.75 | 22 | 0.764706 |
214b3d3ee446fe2be58044437ef73edc9721e253 | 279 | class UberArray
def initialize(array)
@array = array
end
def each
i = 0
while i < @array.length
yield @array[i]
i += 1
end
end
end
UberArray.new([1, 2, 3, 4, 5, 6]).each do |x|
puts "element #{x}"
end
| 15.5 | 45 | 0.476703 |
081bd48891f3686df555ce30c3f4042b99a8eca2 | 178 | json.extract! possible_answer, :id, :question_id, :order, :answer_text, :is_text, :boolean, :created_at, :updated_at
json.url possible_answer_url(possible_answer, format: :json)
| 59.333333 | 116 | 0.786517 |
5dcc40a4cccd4cd627f918090700641c429f0956 | 1,035 | require 'cheer_up/version'
require 'thor'
module CheerUp
class Cli < Thor
desc 'up "name"', 'cheers you up with a name if you provide one'
long_desc <<-MEH
    A sarcastic phrase spoken by the Terminal, which probably
    won't cheer up whoever it's directed at.
MEH
def up(name = nil)
phrases = [
"My heart bleeds for you",
"No one cares",
"Get over yourself",
"I'm playing the world's saddest song on the world's tiniest violin, just for you",
"Everything's gonna be alright",
"Cry me a river",
"Oh dearie me",
"It might never happen",
"As a wise fish once said, don't worry be happy",
"Calm down, calm down",
"Always look on the bright side of life"
]
cheer_up = "#{phrases.sample} #{name}".strip
system("say \"#{cheer_up}\"") if osx?
puts "#{cheer_up}"
end
no_commands do
def osx?
RbConfig::CONFIG['host_os'].downcase.include?('darwin')
end
end
end
end
| 26.538462 | 91 | 0.588406 |
e203aa0f7977f025e7723a55acf0542568dba2ed | 116 | class AdminController < ApplicationController
def index
# puts "in the controller"
# binding.pry
end
end | 19.333333 | 45 | 0.724138 |
9172862411af8bbc609268f31b659ae5bc0ec4fb | 306 | # == Schema Information
#
# Table name: carts
#
# id :integer not null, primary key
# user_id :integer
# status :integer
# metadata :jsonb
# created_at :datetime not null
# updated_at :datetime not null
#
FactoryGirl.define do
factory :cart do
end
end
| 17 | 53 | 0.604575 |
08304014de74d48ffef6a0c296913eaae9cb0bae | 312 | class CreatePosts < ActiveRecord::Migration[6.1]
def change
create_table :posts do |t|
t.string :header
t.string :color
t.string :title
t.string :design
t.string :body
t.integer :user_id
t.timestamps
end
add_index :posts, :user_id, unique: true
end
end
| 19.5 | 48 | 0.625 |
2699c97552c691b3e67cea0ad1e79aab5e496da8 | 8,125 | # Copyright (c) 2016, 2022, Oracle and/or its affiliates. All rights reserved.
# This software is dual-licensed to you under the Universal Permissive License (UPL) 1.0 as shown at https://oss.oracle.com/licenses/upl or Apache License 2.0 as shown at http://www.apache.org/licenses/LICENSE-2.0. You may choose either license.
require 'date'
require_relative 'auto_scaling_policy'
# rubocop:disable Lint/UnneededCopDisableDirective, Metrics/LineLength
module OCI
# An autoscaling policy that defines execution schedules for an autoscaling configuration.
class Autoscaling::Models::ScheduledPolicy < Autoscaling::Models::AutoScalingPolicy
# **[Required]** The schedule for executing the autoscaling policy.
# @return [OCI::Autoscaling::Models::ExecutionSchedule]
attr_accessor :execution_schedule
# @return [OCI::Autoscaling::Models::ResourceAction]
attr_accessor :resource_action
# Attribute mapping from ruby-style variable name to JSON key.
def self.attribute_map
{
# rubocop:disable Style/SymbolLiteral
'capacity': :'capacity',
'id': :'id',
'display_name': :'displayName',
'policy_type': :'policyType',
'time_created': :'timeCreated',
'is_enabled': :'isEnabled',
'execution_schedule': :'executionSchedule',
'resource_action': :'resourceAction'
# rubocop:enable Style/SymbolLiteral
}
end
# Attribute type mapping.
def self.swagger_types
{
# rubocop:disable Style/SymbolLiteral
'capacity': :'OCI::Autoscaling::Models::Capacity',
'id': :'String',
'display_name': :'String',
'policy_type': :'String',
'time_created': :'DateTime',
'is_enabled': :'BOOLEAN',
'execution_schedule': :'OCI::Autoscaling::Models::ExecutionSchedule',
'resource_action': :'OCI::Autoscaling::Models::ResourceAction'
# rubocop:enable Style/SymbolLiteral
}
end
# rubocop:disable Metrics/CyclomaticComplexity, Metrics/AbcSize, Metrics/PerceivedComplexity
# rubocop:disable Metrics/MethodLength, Layout/EmptyLines, Style/SymbolLiteral
# Initializes the object
# @param [Hash] attributes Model attributes in the form of hash
    # @option attributes [OCI::Autoscaling::Models::Capacity] :capacity The value to assign to the {OCI::Autoscaling::Models::AutoScalingPolicy#capacity #capacity} property
    # @option attributes [String] :id The value to assign to the {OCI::Autoscaling::Models::AutoScalingPolicy#id #id} property
    # @option attributes [String] :display_name The value to assign to the {OCI::Autoscaling::Models::AutoScalingPolicy#display_name #display_name} property
    # @option attributes [DateTime] :time_created The value to assign to the {OCI::Autoscaling::Models::AutoScalingPolicy#time_created #time_created} property
    # @option attributes [BOOLEAN] :is_enabled The value to assign to the {OCI::Autoscaling::Models::AutoScalingPolicy#is_enabled #is_enabled} property
# @option attributes [OCI::Autoscaling::Models::ExecutionSchedule] :execution_schedule The value to assign to the {#execution_schedule} property
# @option attributes [OCI::Autoscaling::Models::ResourceAction] :resource_action The value to assign to the {#resource_action} property
def initialize(attributes = {})
return unless attributes.is_a?(Hash)
attributes['policyType'] = 'scheduled'
super(attributes)
# convert string to symbol for hash key
attributes = attributes.each_with_object({}) { |(k, v), h| h[k.to_sym] = v }
self.execution_schedule = attributes[:'executionSchedule'] if attributes[:'executionSchedule']
raise 'You cannot provide both :executionSchedule and :execution_schedule' if attributes.key?(:'executionSchedule') && attributes.key?(:'execution_schedule')
self.execution_schedule = attributes[:'execution_schedule'] if attributes[:'execution_schedule']
self.resource_action = attributes[:'resourceAction'] if attributes[:'resourceAction']
raise 'You cannot provide both :resourceAction and :resource_action' if attributes.key?(:'resourceAction') && attributes.key?(:'resource_action')
self.resource_action = attributes[:'resource_action'] if attributes[:'resource_action']
end
# rubocop:enable Metrics/CyclomaticComplexity, Metrics/AbcSize, Metrics/PerceivedComplexity
# rubocop:enable Metrics/MethodLength, Layout/EmptyLines, Style/SymbolLiteral
# rubocop:disable Metrics/CyclomaticComplexity, Metrics/AbcSize, Metrics/PerceivedComplexity, Layout/EmptyLines
# Checks equality by comparing each attribute.
# @param [Object] other the other object to be compared
def ==(other)
return true if equal?(other)
self.class == other.class &&
capacity == other.capacity &&
id == other.id &&
display_name == other.display_name &&
policy_type == other.policy_type &&
time_created == other.time_created &&
is_enabled == other.is_enabled &&
execution_schedule == other.execution_schedule &&
resource_action == other.resource_action
end
# rubocop:enable Metrics/CyclomaticComplexity, Metrics/AbcSize, Metrics/PerceivedComplexity, Layout/EmptyLines
# @see the `==` method
# @param [Object] other the other object to be compared
def eql?(other)
self == other
end
# rubocop:disable Metrics/AbcSize, Layout/EmptyLines
# Calculates hash code according to all attributes.
# @return [Fixnum] Hash code
def hash
[capacity, id, display_name, policy_type, time_created, is_enabled, execution_schedule, resource_action].hash
end
# rubocop:enable Metrics/AbcSize, Layout/EmptyLines
# rubocop:disable Metrics/AbcSize, Layout/EmptyLines
# Builds the object from hash
# @param [Hash] attributes Model attributes in the form of hash
# @return [Object] Returns the model itself
def build_from_hash(attributes)
return nil unless attributes.is_a?(Hash)
self.class.swagger_types.each_pair do |key, type|
if type =~ /^Array<(.*)>/i
          # check to ensure the input is an array given that the attribute
# is documented as an array but the input is not
if attributes[self.class.attribute_map[key]].is_a?(Array)
public_method("#{key}=").call(
attributes[self.class.attribute_map[key]]
.map { |v| OCI::Internal::Util.convert_to_type(Regexp.last_match(1), v) }
)
end
elsif !attributes[self.class.attribute_map[key]].nil?
public_method("#{key}=").call(
OCI::Internal::Util.convert_to_type(type, attributes[self.class.attribute_map[key]])
)
end
# or else data not found in attributes(hash), not an issue as the data can be optional
end
self
end
# rubocop:enable Metrics/AbcSize, Layout/EmptyLines
# Returns the string representation of the object
# @return [String] String presentation of the object
def to_s
to_hash.to_s
end
# Returns the object in the form of hash
# @return [Hash] Returns the object in the form of hash
def to_hash
hash = {}
self.class.attribute_map.each_pair do |attr, param|
value = public_method(attr).call
next if value.nil? && !instance_variable_defined?("@#{attr}")
hash[param] = _to_hash(value)
end
hash
end
private
# Outputs non-array value in the form of hash
# For object, use to_hash. Otherwise, just return the value
# @param [Object] value Any valid value
# @return [Hash] Returns the value in the form of hash
def _to_hash(value)
if value.is_a?(Array)
value.compact.map { |v| _to_hash(v) }
elsif value.is_a?(Hash)
{}.tap do |hash|
value.each { |k, v| hash[k] = _to_hash(v) }
end
elsif value.respond_to? :to_hash
value.to_hash
else
value
end
end
end
end
# rubocop:enable Lint/UnneededCopDisableDirective, Metrics/LineLength
| 41.454082 | 245 | 0.688985 |
01d29f6ec845fb28105871fd86528a5d2401079a | 266 | require "rubocop"
require "standard/rubocop/ext"
require "standard/version"
require "standard/cli"
require "standard/railtie" if defined?(Rails) && defined?(Rails::Railtie)
require "standard/formatter"
require "standard/cop/block_delimiters"
module Standard
end
| 19 | 73 | 0.785714 |
3991db085748504e105dad0700703b8229e0e3d9 | 143 | class Lion
attr_accessor :name, :sound
def initialize(attributes)
@name = attributes[:name]
@sound = attributes[:sound]
end
end
| 15.888889 | 31 | 0.685315 |
6a0a92bafd8909c0b9fedc10820924da9182aa0a | 2,741 | # frozen_string_literal: true
require 'spec_helper'
RSpec.describe SidekiqHelper do
describe 'parse_sidekiq_ps' do
it 'parses line with time' do
line = '55137 10,0 2,1 S+ 2:30pm sidekiq 4.1.4 gitlab [0 of 25 busy] '
parts = helper.parse_sidekiq_ps(line)
expect(parts).to eq(['55137', '10,0', '2,1', 'S+', '2:30pm', 'sidekiq 4.1.4 gitlab [0 of 25 busy]'])
end
it 'parses line with date' do
line = '55137 10,0 2,1 S+ Aug 4 sidekiq 4.1.4 gitlab [0 of 25 busy] '
parts = helper.parse_sidekiq_ps(line)
expect(parts).to eq(['55137', '10,0', '2,1', 'S+', 'Aug 4', 'sidekiq 4.1.4 gitlab [0 of 25 busy]'])
end
it 'parses line with two digit date' do
line = '55137 10,0 2,1 S+ Aug 04 sidekiq 4.1.4 gitlab [0 of 25 busy] '
parts = helper.parse_sidekiq_ps(line)
expect(parts).to eq(['55137', '10,0', '2,1', 'S+', 'Aug 04', 'sidekiq 4.1.4 gitlab [0 of 25 busy]'])
end
it 'parses line with dot as float separator' do
line = '55137 10.0 2.1 S+ 2:30pm sidekiq 4.1.4 gitlab [0 of 25 busy] '
parts = helper.parse_sidekiq_ps(line)
expect(parts).to eq(['55137', '10.0', '2.1', 'S+', '2:30pm', 'sidekiq 4.1.4 gitlab [0 of 25 busy]'])
end
it 'parses OSX output' do
line = ' 1641 1.5 3.8 S+ 4:04PM sidekiq 4.2.1 gitlab [0 of 25 busy]'
parts = helper.parse_sidekiq_ps(line)
expect(parts).to eq(['1641', '1.5', '3.8', 'S+', '4:04PM', 'sidekiq 4.2.1 gitlab [0 of 25 busy]'])
end
it 'parses Ubuntu output' do
# Ubuntu Linux 16.04 LTS / procps-3.3.10-4ubuntu2
line = ' 938 1.4 2.5 Sl+ 21:23:21 sidekiq 4.2.1 gitlab [0 of 25 busy] '
parts = helper.parse_sidekiq_ps(line)
expect(parts).to eq(['938', '1.4', '2.5', 'Sl+', '21:23:21', 'sidekiq 4.2.1 gitlab [0 of 25 busy]'])
end
it 'parses Debian output' do
# Debian Linux Wheezy/Jessie
line = '17725 1.0 12.1 Ssl 19:20:15 sidekiq 4.2.1 gitlab-rails [0 of 25 busy] '
parts = helper.parse_sidekiq_ps(line)
expect(parts).to eq(['17725', '1.0', '12.1', 'Ssl', '19:20:15', 'sidekiq 4.2.1 gitlab-rails [0 of 25 busy]'])
end
it 'parses OpenBSD output' do
# OpenBSD 6.1
line = '49258 0.5 2.3 R/0 Fri10PM ruby23: sidekiq 4.2.7 gitlab [0 of 25 busy] (ruby23)'
parts = helper.parse_sidekiq_ps(line)
expect(parts).to eq(['49258', '0.5', '2.3', 'R/0', 'Fri10PM', 'ruby23: sidekiq 4.2.7 gitlab [0 of 25 busy] (ruby23)'])
end
it 'does fail gracefully on line not matching the format' do
line = '55137 10.0 2.1 S+ 2:30pm something'
parts = helper.parse_sidekiq_ps(line)
expect(parts).to eq(['?', '?', '?', '?', '?', '?'])
end
end
end
| 37.040541 | 124 | 0.587377 |
1abcaf2fa8b0b0effbee145aadd24b094ea4f0ed | 6,798 | # Copyright 2018 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
require "concurrent"
require "monitor"
module Google
module Cloud
module PubSub
class Subscriber
##
# @private
class TimedUnaryBuffer
include MonitorMixin
attr_reader :max_bytes, :interval
def initialize subscriber, max_bytes: 10000000, interval: 1.0
super() # to init MonitorMixin
@subscriber = subscriber
@max_bytes = max_bytes
@interval = interval
# Using a Hash ensures there is only one entry for each ack_id in
# the buffer. Adding an entry again will overwrite the previous
# entry.
@register = {}
@task = Concurrent::TimerTask.new execution_interval: interval do
flush!
end
end
def acknowledge ack_ids
return if ack_ids.empty?
synchronize do
ack_ids.each do |ack_id|
                # ack has no deadline set, use :ack to indicate it is an ack
@register[ack_id] = :ack
end
end
true
end
def modify_ack_deadline deadline, ack_ids
return if ack_ids.empty?
synchronize do
ack_ids.each do |ack_id|
@register[ack_id] = deadline
end
end
true
end
def renew_lease deadline, ack_ids
return if ack_ids.empty?
synchronize do
ack_ids.each do |ack_id|
# Don't overwrite pending actions when renewing leased messages.
@register[ack_id] ||= deadline
end
end
true
end
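          # Taken together, the three registration methods above behave like this
          # (ack ids and deadlines are illustrative): an entry written by #acknowledge
          # or #modify_ack_deadline overwrites whatever was registered for that ack_id,
          # while #renew_lease uses ||= and therefore never clobbers a pending action.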
def flush!
# Grab requests from the buffer and release synchronize ASAP
requests = flush_requests!
return if requests.empty?
# Perform the RCP calls concurrently
with_threadpool do |pool|
requests[:acknowledge].each do |ack_req|
add_future pool do
@subscriber.service.acknowledge \
ack_req.subscription, *ack_req.ack_ids
end
end
requests[:modify_ack_deadline].each do |mod_ack_req|
add_future pool do
@subscriber.service.modify_ack_deadline \
mod_ack_req.subscription, mod_ack_req.ack_ids,
mod_ack_req.ack_deadline_seconds
end
end
end
true
end
def start
@task.execute
self
end
def stop
@task.shutdown
flush!
self
end
def started?
@task.running?
end
def stopped?
!started?
end
private
def flush_requests!
prev_reg =
synchronize do
return {} if @register.empty?
reg = @register
@register = Concurrent::Map.new
reg
end
groups = prev_reg.each_pair.group_by { |_ack_id, delay| delay }
req_hash = Hash[groups.map { |k, v| [k, v.map(&:first)] }]
requests = { acknowledge: [] }
ack_ids = Array(req_hash.delete(:ack)) # ack has no deadline set
if ack_ids.any?
requests[:acknowledge] = create_acknowledge_requests ack_ids
end
requests[:modify_ack_deadline] =
req_hash.map do |mod_deadline, mod_ack_ids|
create_modify_ack_deadline_requests mod_deadline, mod_ack_ids
end.flatten
requests
end
def create_acknowledge_requests ack_ids
req = Google::Cloud::PubSub::V1::AcknowledgeRequest.new(
subscription: subscription_name,
ack_ids: ack_ids
)
addl_to_create = req.to_proto.bytesize / max_bytes
return [req] if addl_to_create.zero?
ack_ids.each_slice(addl_to_create + 1).map do |sliced_ack_ids|
Google::Cloud::PubSub::V1::AcknowledgeRequest.new(
subscription: subscription_name,
ack_ids: sliced_ack_ids
)
end
end
def create_modify_ack_deadline_requests deadline, ack_ids
req = Google::Cloud::PubSub::V1::ModifyAckDeadlineRequest.new(
subscription: subscription_name,
ack_ids: ack_ids,
ack_deadline_seconds: deadline
)
addl_to_create = req.to_proto.bytesize / max_bytes
return [req] if addl_to_create.zero?
ack_ids.each_slice(addl_to_create + 1).map do |sliced_ack_ids|
Google::Cloud::PubSub::V1::ModifyAckDeadlineRequest.new(
subscription: subscription_name,
ack_ids: sliced_ack_ids,
ack_deadline_seconds: deadline
)
end
end
def subscription_name
@subscriber.subscription_name
end
def push_threads
@subscriber.push_threads
end
def error! error
@subscriber.error! error
end
def with_threadpool
pool = Concurrent::CachedThreadPool.new \
max_threads: @subscriber.push_threads
yield pool
pool.shutdown
pool.wait_for_termination 60
return if pool.shutdown?
pool.kill
begin
raise "Timeout making subscriber API calls"
rescue StandardError => error
error! error
end
end
def add_future pool
Concurrent::Promises.future_on pool do
begin
yield
rescue StandardError => error
error! error
end
end
end
end
end
end
Pubsub = PubSub unless const_defined? :Pubsub
end
end
| 28.683544 | 80 | 0.53604 |
61f24fa4f31f3e5f8f9b2328aff7a0dafd04a1a1 | 566 | module RailsAdmin
module Extensions
module PaperTrail
class VersionProxy
def message
@message = @version.event
end
def version_id
@version.id
end
def remark
@remark = @version.remark
end
end
class AuditingAdapter
COLUMN_MAPPING = {
table: :item_type,
username: :whodunnit,
item: :item_id,
created_at: :created_at,
message: :event,
remark: :remark,
}.freeze
end
end
end
end
| 18.258065 | 35 | 0.519435 |
397deaa3d752cb93832464ca985f2f7b1c628dde | 1,482 | class Hive < Formula
desc "Hadoop-based data summarization, query, and analysis"
homepage "https://hive.apache.org"
url "https://www.apache.org/dyn/closer.lua?path=hive/hive-3.1.2/apache-hive-3.1.2-bin.tar.gz"
mirror "https://archive.apache.org/dist/hive/hive-3.1.2/apache-hive-3.1.2-bin.tar.gz"
sha256 "d75dcf36908b4e7b9b0ec9aec57a46a6628b97b276c233cb2c2f1a3e89b13462"
revision 1
bottle :unneeded
depends_on "hadoop"
depends_on :java => "1.8"
def install
rm_f Dir["bin/*.cmd", "bin/ext/*.cmd", "bin/ext/util/*.cmd"]
libexec.install %w[bin conf examples hcatalog lib scripts]
# Hadoop currently supplies a newer version
# and two versions on the classpath causes problems
rm libexec/"lib/guava-19.0.jar"
guava = (Formula["hadoop"].opt_libexec/"share/hadoop/common/lib").glob("guava-*-jre.jar")
ln_s guava.first, libexec/"lib"
Pathname.glob("#{libexec}/bin/*") do |file|
next if file.directory?
(bin/file.basename).write_env_script file,
Language::Java.java_home_env("1.7+").merge(:HIVE_HOME => libexec)
end
end
def caveats
<<~EOS
Hadoop must be in your path for hive executable to work.
If you want to use HCatalog with Pig, set $HCAT_HOME in your profile:
export HCAT_HOME=#{opt_libexec}/hcatalog
EOS
end
test do
system bin/"schematool", "-initSchema", "-dbType", "derby"
assert_match "Hive #{version}", shell_output("#{bin}/hive --version")
end
end
| 32.217391 | 95 | 0.686235 |
7a017beac9832ef3cd8d754027e60c2fc0af5049 | 8,381 | =begin
#Aspose.Diagram Cloud API Reference
#No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen)
OpenAPI spec version: 3.0
Generated by: https://github.com/swagger-api/swagger-codegen.git
Swagger Codegen version: 2.3.0-SNAPSHOT
=end
require 'date'
module AsposeDiagramCloud
class PdfEncryptionDetails
attr_accessor :permissions
attr_accessor :encryption_algorithm
attr_accessor :user_password
attr_accessor :owner_password
class EnumAttributeValidator
attr_reader :datatype
attr_reader :allowable_values
def initialize(datatype, allowable_values)
@allowable_values = allowable_values.map do |value|
case datatype.to_s
when /Integer/i
value.to_i
when /Float/i
value.to_f
else
value
end
end
end
def valid?(value)
!value || allowable_values.include?(value)
end
end
# Attribute mapping from ruby-style variable name to JSON key.
def self.attribute_map
{
:'permissions' => :'Permissions',
:'encryption_algorithm' => :'EncryptionAlgorithm',
:'user_password' => :'UserPassword',
:'owner_password' => :'OwnerPassword'
}
end
# Attribute type mapping.
def self.swagger_types
{
:'permissions' => :'String',
:'encryption_algorithm' => :'String',
:'user_password' => :'String',
:'owner_password' => :'String'
}
end
# Initializes the object
# @param [Hash] attributes Model attributes in the form of hash
def initialize(attributes = {})
return unless attributes.is_a?(Hash)
# convert string to symbol for hash key
attributes = attributes.each_with_object({}){|(k,v), h| h[k.to_sym] = v}
if attributes.has_key?(:'Permissions')
self.permissions = attributes[:'Permissions']
end
if attributes.has_key?(:'EncryptionAlgorithm')
self.encryption_algorithm = attributes[:'EncryptionAlgorithm']
end
if attributes.has_key?(:'UserPassword')
self.user_password = attributes[:'UserPassword']
end
if attributes.has_key?(:'OwnerPassword')
self.owner_password = attributes[:'OwnerPassword']
end
end
# Show invalid properties with the reasons. Usually used together with valid?
    # @return Array for valid properties with the reasons
def list_invalid_properties
invalid_properties = Array.new
return invalid_properties
end
# Check to see if the all the properties in the model are valid
# @return true if the model is valid
def valid?
permissions_validator = EnumAttributeValidator.new('String', ["DisallowAll", "Printing", "ModifyContents", "ContentCopy", "ModifyAnnotations", "FillIn", "ContentCopyForAccessibility", "DocumentAssembly", "HighResolutionPrinting", "AllowAll"])
return false unless permissions_validator.valid?(@permissions)
encryption_algorithm_validator = EnumAttributeValidator.new('String', ["RC4_40", "RC4_128"])
return false unless encryption_algorithm_validator.valid?(@encryption_algorithm)
return true
end
# Custom attribute writer method checking allowed values (enum).
# @param [Object] permissions Object to be assigned
def permissions=(permissions)
validator = EnumAttributeValidator.new('String', ["DisallowAll", "Printing", "ModifyContents", "ContentCopy", "ModifyAnnotations", "FillIn", "ContentCopyForAccessibility", "DocumentAssembly", "HighResolutionPrinting", "AllowAll"])
unless validator.valid?(permissions)
fail ArgumentError, "invalid value for 'permissions', must be one of #{validator.allowable_values}."
end
@permissions = permissions
end
# Custom attribute writer method checking allowed values (enum).
# @param [Object] encryption_algorithm Object to be assigned
def encryption_algorithm=(encryption_algorithm)
validator = EnumAttributeValidator.new('String', ["RC4_40", "RC4_128"])
unless validator.valid?(encryption_algorithm)
fail ArgumentError, "invalid value for 'encryption_algorithm', must be one of #{validator.allowable_values}."
end
@encryption_algorithm = encryption_algorithm
end
# Checks equality by comparing each attribute.
# @param [Object] Object to be compared
def ==(o)
return true if self.equal?(o)
self.class == o.class &&
permissions == o.permissions &&
encryption_algorithm == o.encryption_algorithm &&
user_password == o.user_password &&
owner_password == o.owner_password
end
# @see the `==` method
# @param [Object] Object to be compared
def eql?(o)
self == o
end
# Calculates hash code according to all attributes.
# @return [Fixnum] Hash code
def hash
[permissions, encryption_algorithm, user_password, owner_password].hash
end
# Builds the object from hash
# @param [Hash] attributes Model attributes in the form of hash
# @return [Object] Returns the model itself
def build_from_hash(attributes)
return nil unless attributes.is_a?(Hash)
self.class.swagger_types.each_pair do |key, type|
if type =~ /\AArray<(.*)>/i
          # check to ensure the input is an array given that the attribute
# is documented as an array but the input is not
if attributes[self.class.attribute_map[key]].is_a?(Array)
self.send("#{key}=", attributes[self.class.attribute_map[key]].map{ |v| _deserialize($1, v) } )
end
elsif !attributes[self.class.attribute_map[key]].nil?
self.send("#{key}=", _deserialize(type, attributes[self.class.attribute_map[key]]))
end # or else data not found in attributes(hash), not an issue as the data can be optional
end
self
end
# Deserializes the data based on type
# @param string type Data type
# @param string value Value to be deserialized
# @return [Object] Deserialized data
def _deserialize(type, value)
case type.to_sym
when :DateTime
DateTime.parse(value)
when :Date
Date.parse(value)
when :String
value.to_s
when :Integer
value.to_i
when :Float
value.to_f
when :BOOLEAN
if value.to_s =~ /\A(true|t|yes|y|1)\z/i
true
else
false
end
when :Object
# generic object (usually a Hash), return directly
value
when /\AArray<(?<inner_type>.+)>\z/
inner_type = Regexp.last_match[:inner_type]
value.map { |v| _deserialize(inner_type, v) }
when /\AHash<(?<k_type>.+?), (?<v_type>.+)>\z/
k_type = Regexp.last_match[:k_type]
v_type = Regexp.last_match[:v_type]
{}.tap do |hash|
value.each do |k, v|
hash[_deserialize(k_type, k)] = _deserialize(v_type, v)
end
end
else # model
temp_model = AsposeDiagramCloud.const_get(type).new
temp_model.build_from_hash(value)
end
end
# Returns the string representation of the object
# @return [String] String presentation of the object
def to_s
to_hash.to_s
end
# to_body is an alias to to_hash (backward compatibility)
# @return [Hash] Returns the object in the form of hash
def to_body
to_hash
end
# Returns the object in the form of hash
# @return [Hash] Returns the object in the form of hash
def to_hash
hash = {}
self.class.attribute_map.each_pair do |attr, param|
value = self.send(attr)
next if value.nil?
hash[param] = _to_hash(value)
end
hash
end
# Outputs non-array value in the form of hash
# For object, use to_hash. Otherwise, just return the value
# @param [Object] value Any valid value
# @return [Hash] Returns the value in the form of hash
def _to_hash(value)
if value.is_a?(Array)
value.compact.map{ |v| _to_hash(v) }
elsif value.is_a?(Hash)
{}.tap do |hash|
value.each { |k, v| hash[k] = _to_hash(v) }
end
elsif value.respond_to? :to_hash
value.to_hash
else
value
end
end
end
end
| 32.111111 | 248 | 0.647536 |
bb2d49957c7a8ba5bca8f99229255b2864321635 | 3,080 | # frozen_string_literal: true
require File.dirname(__FILE__) + '/../../test_helper.rb'
class Drip::Client::EventsTest < Drip::TestCase
def setup
@client = Drip::Client.new { |c| c.account_id = "12345" }
end
context "#track_event" do
setup do
@email = "[email protected]"
@action = "Signed up"
@properties = { "foo" => "bar" }
end
context "without options" do
setup do
@payload = {
"events" => [{
"email" => @email,
"action" => @action,
"properties" => @properties
}]
}.to_json
@response_status = 201
@response_body = "{}"
stub_request(:post, "https://api.getdrip.com/v2/12345/events").
to_return(status: @response_status, body: @response_body, headers: {})
end
should "send the right request" do
expected = Drip::Response.new(@response_status, JSON.parse(@response_body))
assert_equal expected, @client.track_event(@email, @action, @properties)
end
end
context "with options" do
setup do
@occurred_at = "2015-09-28T10:00:00Z"
@options = { occurred_at: @occurred_at }
@payload = {
"events" => [{
"occurred_at" => @occurred_at,
"email" => @email,
"action" => @action,
"properties" => @properties
}]
}.to_json
@response_status = 201
@response_body = "{}"
stub_request(:post, "https://api.getdrip.com/v2/12345/events").
to_return(status: @response_status, body: @response_body, headers: {})
end
should "send the right request" do
expected = Drip::Response.new(@response_status, JSON.parse(@response_body))
assert_equal expected, @client.track_event(@email, @action, @properties, @options)
end
end
end
context "#track_events" do
setup do
@events = [
{
email: "[email protected]",
action: "subscribed"
},
{
email: "[email protected]",
action: "unsubscribed"
}
]
@payload = { "batches" => [{ "events" => @events }] }.to_json
@response_status = 201
@response_body = "{}"
stub_request(:post, "https://api.getdrip.com/v2/12345/events/batches").
to_return(status: @response_status, body: @response_body, headers: {})
end
should "send the right request" do
expected = Drip::Response.new(@response_status, JSON.parse(@response_body))
assert_equal expected, @client.track_events(@events)
end
end
context "#event_actions" do
setup do
@response_status = 200
@response_body = "{}"
stub_request(:get, "https://api.getdrip.com/v2/12345/event_actions").
to_return(status: @response_status, body: @response_body, headers: {})
end
should "send the right request" do
expected = Drip::Response.new(@response_status, JSON.parse(@response_body))
assert_equal expected, @client.event_actions
end
end
end
| 28 | 90 | 0.583766 |
e9a3adb3d733ba711b1b8c7e87093317fd7a4c05 | 3,176 | require 'dry/core/class_attributes'
require 'dry-types'
RSpec.describe 'Class Macros' do
before do
module Test
class MyClass
extend Dry::Core::ClassAttributes
defines :one, :two, :three
one 1
two 2
three 3
end
class OtherClass < Test::MyClass
two 'two'
three nil
end
end
end
it 'defines accessor like methods on the class and subclasses' do
%i(one two three).each do |method_name|
expect(Test::MyClass).to respond_to(method_name)
expect(Test::OtherClass).to respond_to(method_name)
end
end
it 'allows storage of values on the class' do
expect(Test::MyClass.one).to eq(1)
expect(Test::MyClass.two).to eq(2)
expect(Test::MyClass.three).to eq(3)
end
it 'allows overwriting of inherited values with nil' do
expect(Test::OtherClass.three).to eq(nil)
end
context 'type option' do
let(:klass) do
module Test
class NewClass
extend Dry::Core::ClassAttributes
end
end
Test::NewClass
end
context 'using classes' do
before do
klass.defines :one, type: String
end
it 'allows to pass type option' do
klass.one '1'
expect(Test::NewClass.one).to eq '1'
end
      it 'raises InvalidClassAttributeValue when an invalid value is passed' do
expect{
klass.one 1
}.to raise_error(
Dry::Core::InvalidClassAttributeValue,
'Value 1 is invalid for class attribute :one'
)
end
end
context 'using dry-types' do
before do
module Test
class Types
include Dry::Types.module
end
end
klass.defines :one, type: Test::Types::String
end
it 'allows to pass type option' do
klass.one '1'
expect(Test::NewClass.one).to eq '1'
end
      it 'raises InvalidClassAttributeValue when an invalid value is passed' do
expect{
klass.one 1
}.to raise_error(Dry::Core::InvalidClassAttributeValue)
end
end
end
it 'allows inheritance of values' do
expect(Test::OtherClass.one).to eq(1)
end
it 'allows overwriting of inherited values' do
expect(Test::OtherClass.two).to eq('two')
end
it 'copies values from the parent before running hooks' do
subclass_value = nil
module_with_hook = Module.new do
define_method(:inherited) do |klass|
super(klass)
subclass_value = klass.one
end
end
base_class = Class.new do
extend Dry::Core::ClassAttributes
extend module_with_hook
defines :one
one 1
end
Class.new(base_class)
expect(subclass_value).to be 1
end
it 'works with private setters/getters and inheritance' do
base_class = Class.new do
extend Dry::Core::ClassAttributes
defines :one
class << self; private :one; end
one 1
end
spec = self
child = Class.new(base_class) do |chld|
spec.instance_exec { expect(chld.send(:one)).to spec.eql(1) }
one "one"
end
expect(child.send(:one)).to eql("one")
end
end
| 21.459459 | 74 | 0.61272 |
629db25ebf4bd9eb49475c052418425184b3ad28 | 567 | working_directory '/var/www/myappda/current'
pid '/var/www/myappda/current/tmp/pids/unicorn.pid'
stderr_path '/var/www/myappda/log/unicorn.log'
stdout_path '/var/www/myappda/log/unicorn.log'
listen '/tmp/unicorn.myappda.sock'
worker_processes 12
timeout 30
before_fork do |server, worker|
old_pid = "/var/www/myappda/current/tmp/pids/unicorn.pid.oldbin"
if old_pid != server.pid
begin
sig = (worker.nr + 1) >= server.worker_processes ? :QUIT : :TTOU
Process.kill(sig, File.read(old_pid).to_i)
rescue Errno::ENOENT, Errno::ESRCH
end
end
end
| 29.842105 | 68 | 0.730159 |
4adfe37a2501c5aee7e2052ecc0fb428c12a3247 | 551 | module Cnab240::V80
class AgenciaItau < BinData::Record
include Cnab240::DefaultMixin
include Cnab240::SegmentoMixin
lstring :zeros_1, :length => 1, :pad_byte => '0'
lstring :favorecido_agencia, :length => 4, :pad_byte => '0'
string :brancos_1, :length => 1, :pad_byte => ' '
lstring :zeros_2, :length => 6, :pad_byte => '0'
lstring :favorecido_conta, :length => 6, :pad_byte => '0'
string :brancos_2, :length => 1, :pad_byte => ' '
lstring :favorecido_agencia_conta_dv, :length => 1, :pad_byte => '0'
end
end | 36.733333 | 72 | 0.642468 |
d55826981900b2d0565f8ac227d297dc34f50827 | 3,959 | # frozen_string_literal: true
require_relative './configuration_validator'
module Reek
module Configuration
# Responsible for converting marked strings coming from the outside world
# into proper regexes.
class ConfigurationConverter
REGEXABLE_ATTRIBUTES = %w(accept reject exclude).freeze
include ConfigurationValidator
attr_reader :configuration
# @param configuration [Hash] e.g.
#
# detectors => {
# "UnusedPrivateMethod" => {"exclude"=>["/exclude regexp/"]},
# "UncommunicativeMethodName"=>{"reject"=>["reject name"], "accept"=>["accept name"]
# },
# directories => {
# "app/controllers" => {
# "UnusedPrivateMethod" => {"exclude"=>["/exclude regexp/"]},
# "UncommunicativeMethodName"=>{"reject"=>["reject name"], "accept"=>["accept name"]}
# }
# }
def initialize(configuration)
@configuration = configuration
end
# Converts all marked strings across the whole configuration to regexes.
# @return [Hash]
#
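      # Illustrative sketch (hypothetical attribute values, shaped like the class-level
      # example above): a detector entry such as
      #   "UnusedPrivateMethod" => { "exclude" => ["/write_/", "dump"] }
      # comes back with the marked string "/write_/" converted to the Regexp /write_/,
      # while the plain string "dump" is left untouched.
      #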
def convert
strings_to_regexes_for_detectors
strings_to_regexes_for_directories
configuration
end
private
# @param value [String] String that is potentially marked as regex, e.g. "/foobar/".
# @return [Bool] if the string in question is marked as regex.
#
# @quality :reek:UtilityFunction
def marked_as_regex?(value)
value.start_with?('/') && value.end_with?('/')
end
# @param value [value] String that is potentially marked as regex, e.g. "/foobar/".
# @return [Regexp] e.g. /foobar/.
#
def to_regex(value)
marked_as_regex?(value) ? Regexp.new(value[1..-2]) : value
end
# @param detector_configuration [Hash] e.g.
# { "UnusedPrivateMethod" => {"exclude"=>["/exclude regexp/"] }
# @return [Array] all the attributes from the detector configuration that potentially contain regexes.
# Using this example above this would just be "exclude".
#
# @quality :reek:UtilityFunction
def convertible_attributes(detector_configuration)
detector_configuration.keys & REGEXABLE_ATTRIBUTES
end
# Iterates over our detector configuration and converts all marked strings into regexes.
# @return nil
#
# @quality :reek:DuplicateMethodCall { max_calls: 3 }
# @quality :reek:NestedIterators { max_allowed_nesting: 3 }
# @quality :reek:TooManyStatements { max_statements: 6 }
def strings_to_regexes_for_detectors
return unless configuration[DETECTORS_KEY]
configuration[DETECTORS_KEY].tap do |detectors|
detectors.keys.each do |detector|
convertible_attributes(detectors[detector]).each do |attribute|
detectors[detector][attribute] = detectors[detector][attribute].map do |item|
to_regex item
end
end
end
end
end
# Iterates over our directory configuration and converts all marked strings into regexes.
# @return nil
#
# @quality :reek:DuplicateMethodCall { max_calls: 3 }
# @quality :reek:NestedIterators { max_allowed_nesting: 4 }
# @quality :reek:TooManyStatements { max_statements: 7 }
def strings_to_regexes_for_directories
return unless configuration[DIRECTORIES_KEY]
configuration[DIRECTORIES_KEY].tap do |directories|
directories.keys.each do |directory|
directories[directory].each do |detector, configuration|
convertible_attributes(configuration).each do |attribute|
directories[directory][detector][attribute] = directories[directory][detector][attribute].map do |item|
to_regex item
end
end
end
end
end
end
end
end
end
| 35.666667 | 119 | 0.63021 |
e88f1ec3d25dba8ba4ad3126e52fd2ae7851948e | 514 | cask :v1 => 'meerkat' do
version '1.5.3'
sha256 'bf5a5e492463a7ec1c3e959a55227dd6fcec5bb902124f9bde819bf4f5933982'
url "http://codesorcery.net/downloads/Meerkat_#{version}.dmg"
appcast 'http://codesorcery.net/appcasts/Meerkat.xml',
:sha256 => 'ef91167a375342e078f147e20477056552bef06ea9e306a93ffb8a17ad4e654c'
homepage 'http://codesorcery.net/meerkat'
license :unknown # todo: change license and remove this comment; ':unknown' is a machine-generated placeholder
app 'Meerkat.app'
end
| 39.538462 | 115 | 0.764591 |
d519b829c511163a522f2c6aafe13e053bf8287d | 1,898 | #-- copyright
# OpenProject is an open source project management software.
# Copyright (C) 2012-2021 the OpenProject GmbH
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License version 3.
#
# OpenProject is a fork of ChiliProject, which is a fork of Redmine. The copyright follows:
# Copyright (C) 2006-2013 Jean-Philippe Lang
# Copyright (C) 2010-2013 the ChiliProject Team
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# See docs/COPYRIGHT.rdoc for more details.
#++
require 'spec_helper'
require 'rack/test'
describe "PATCH /api/v3/grids/:id/form", type: :request, content_type: :json do
include Rack::Test::Methods
include API::V3::Utilities::PathHelper
shared_let(:current_user) do
FactoryBot.create(:user)
end
let(:params) { {} }
subject(:response) { last_response }
before do
login_as(current_user)
end
describe '#post' do
before do
post path, params.to_json, 'CONTENT_TYPE' => 'application/json'
end
context 'for a non existing grid' do
let(:path) { api_v3_paths.grid_form(5) }
it 'returns 404 NOT FOUND' do
expect(subject.status)
.to eql 404
end
end
end
end
| 30.612903 | 91 | 0.722866 |
ff5e4ca82e5dfb34b22baa60289cfe330a53eeb1 | 438 | # frozen_string_literal: true
module Resolvers
class UserStarredProjectsResolver < BaseResolver
type Types::ProjectType.connection_type, null: true
argument :search, GraphQL::STRING_TYPE,
required: false,
description: 'Search query.'
alias_method :user, :object
def resolve(**args)
StarredProjectsFinder.new(user, params: args, current_user: current_user).execute
end
end
end
| 24.333333 | 87 | 0.703196 |
bf03bf52c2b3637748cb359e9947d00142bbd5b2 | 3,998 | Rails.application.configure do
# Settings specified here will take precedence over those in config/application.rb.
# Code is not reloaded between requests.
config.cache_classes = true
# Eager load code on boot. This eager loads most of Rails and
# your application in memory, allowing both threaded web servers
# and those relying on copy on write to perform better.
# Rake tasks automatically ignore this option for performance.
config.eager_load = true
# Full error reports are disabled and caching is turned on.
config.consider_all_requests_local = false
config.action_controller.perform_caching = true
# Ensures that a master key has been made available in either ENV["RAILS_MASTER_KEY"]
# or in config/master.key. This key is used to decrypt credentials (and other encrypted files).
# config.require_master_key = true
# Disable serving static files from the `/public` folder by default since
# Apache or NGINX already handles this.
config.public_file_server.enabled = ENV['RAILS_SERVE_STATIC_FILES'].present?
# Compress JavaScripts and CSS.
config.assets.js_compressor = Uglifier.new(harmony: true)
# config.assets.css_compressor = :sass
# Do not fallback to assets pipeline if a precompiled asset is missed.
config.assets.compile = false
config.public_file_server.enabled = true
# `config.assets.precompile` and `config.assets.version` have moved to config/initializers/assets.rb
# Enable serving of images, stylesheets, and JavaScripts from an asset server.
# config.action_controller.asset_host = 'http://assets.example.com'
# Specifies the header that your server uses for sending files.
# config.action_dispatch.x_sendfile_header = 'X-Sendfile' # for Apache
# config.action_dispatch.x_sendfile_header = 'X-Accel-Redirect' # for NGINX
# Store uploaded files on the local file system (see config/storage.yml for options)
config.active_storage.service = :local
# Mount Action Cable outside main process or domain
# config.action_cable.mount_path = nil
# config.action_cable.url = 'wss://example.com/cable'
# config.action_cable.allowed_request_origins = [ 'http://example.com', /http:\/\/example.*/ ]
# Force all access to the app over SSL, use Strict-Transport-Security, and use secure cookies.
# config.force_ssl = true
# Use the lowest log level to ensure availability of diagnostic information
# when problems arise.
config.log_level = :debug
# Prepend all log lines with the following tags.
config.log_tags = [ :request_id ]
# Use a different cache store in production.
# config.cache_store = :mem_cache_store
# Use a real queuing backend for Active Job (and separate queues per environment)
# config.active_job.queue_adapter = :resque
# config.active_job.queue_name_prefix = "my-bar_#{Rails.env}"
config.action_mailer.perform_caching = false
# Ignore bad email addresses and do not raise email delivery errors.
# Set this to true and configure the email server for immediate delivery to raise delivery errors.
# config.action_mailer.raise_delivery_errors = false
# Enable locale fallbacks for I18n (makes lookups for any locale fall back to
# the I18n.default_locale when a translation cannot be found).
config.i18n.fallbacks = true
# Send deprecation notices to registered listeners.
config.active_support.deprecation = :notify
# Use default logging formatter so that PID and timestamp are not suppressed.
config.log_formatter = ::Logger::Formatter.new
# Use a different logger for distributed setups.
# require 'syslog/logger'
# config.logger = ActiveSupport::TaggedLogging.new(Syslog::Logger.new 'app-name')
if ENV["RAILS_LOG_TO_STDOUT"].present?
logger = ActiveSupport::Logger.new(STDOUT)
logger.formatter = config.log_formatter
config.logger = ActiveSupport::TaggedLogging.new(logger)
end
# Do not dump schema after migrations.
config.active_record.dump_schema_after_migration = false
end
| 41.216495 | 102 | 0.758379 |
38ce93051ea0345a932a7ff37f5ea8512290a5f5 | 388 | sensu_check "valid_standalone_check" do
interval 20
command 'true'
standalone true
end
sensu_check "valid_pubsub_check" do
interval 20
command 'true'
subscribers ['all']
end
sensu_check "removed_check" do
action :delete
end
# proxy client
sensu_check "valid_proxy_client_check" do
interval 20
command 'true'
standalone true
source 'some-site-being-monitored'
end
| 16.166667 | 41 | 0.765464 |
263df0c8e4aa5837e647693afae3c5d3101b10b5 | 199 | class CreateSearchSubscriptions < ActiveRecord::Migration
def change
create_table :search_subscriptions do |t|
t.string :query
t.string :email
t.timestamps
end
end
end
| 18.090909 | 57 | 0.698492 |
ac6278ff56cd78b9a74ee48fc875a9b9af9a8ddc | 4,460 | require 'pg_adaptor'
RSpec.describe 'adapting structs into pg' do
let(:db) { Sequel.postgres 'pg_adaptor_test' }
before do
PGAdaptor.db = db
db.extension :pg_array
db.extension :pg_json
db.create_table :test_table do
primary_key :id
String :name
String :other
column :members, "text[]"
column :info, "jsonb"
String :old_data # used to demo only fields specified inserted
end
end
after do
db.drop_table :test_table
end
describe 'db setup' do
it 'can be configured' do
PGAdaptor.db = fake = double
expect(PGAdaptor.db).to eq fake
end
end
describe 'using the adaptor' do
let(:klass) { Struct.new :name, :other, :members, :info, :id }
let(:adaptor) { PGAdaptor.new :test_table, klass }
let(:table) { db[:test_table] }
describe 'with a new model' do
let(:model) { klass.new 'Test Model','Some Data',['Some Members'],{ some: :info } }
let(:data) { table.order(:id).last }
shared_examples_for 'creates a record' do
it 'changes the number of items in the table' do
expect { perform }.to change { table.count }.by(1)
end
it 'generates an id, ignoring any set key' do
perform
expect(data[:id]).to be_a Integer
end
end
shared_examples_for 'new model' do
it_should_behave_like 'creates a record'
it 'sets my fields and values' do
perform
expect(data[:name]).to eq 'Test Model'
expect(data[:other]).to eq 'Some Data'
expect(data[:members]).to eq ['Some Members']
expect(data[:info]).to eq "some" => "info"
end
end
context 'inserting' do
let(:perform) { adaptor.insert model }
it_should_behave_like 'new model'
end
context 'upserting' do
let(:perform) { adaptor.upsert model }
it_should_behave_like 'new model'
end
end
describe 'with an existing model' do
let(:model) { klass.new 'Test Model','Some Data',['Some Other Members'], { some: :info } }
let(:id) { table.insert(name: 'My Model', other: 'Some Value', members: Sequel.pg_array(['Some Members']), info: Sequel.pg_jsonb({other: :info})) }
before do
model.id = id
end
shared_examples_for 'modifying an existing model' do
let(:data) { table.order(:id).last }
it 'doesnt change the number of items in the table' do
expect { perform }.to change { table.count }.by(0)
end
it 'sets my fields and values' do
perform
expect(data[:id]).to eq model.id
expect(data[:name]).to eq 'Test Model'
expect(data[:other]).to eq 'Some Data'
expect(data[:members]).to eq ['Some Other Members']
expect(data[:info]).to eq "some" => "info"
end
end
describe 'to update it' do
let(:perform) { adaptor.update model }
it_should_behave_like 'modifying an existing model'
end
describe 'to upsert it' do
let(:perform) { adaptor.upsert model }
it_should_behave_like 'modifying an existing model'
end
describe 'to fetch it' do
let(:result) { adaptor.fetch(id: id) }
it "returns a class" do
expect(result).to be_a klass
end
specify "the classes fields are set correctly" do
expect(result.id).to eq id
expect(result.name).to eq 'My Model'
expect(result.other).to eq 'Some Value'
expect(result.members).to eq ['Some Members']
end
end
describe 'to remove it' do
it 'removes the record matching the selector' do
expect {
adaptor.remove(id: id)
}.to change { table.count }.to 0
end
end
end
describe 'finding multiples' do
before do
3.times do |i|
table.insert(name: 'My Model', other: i)
end
3.times do |i|
table.insert(name: 'Other Model', other: i)
end
end
let(:result) { adaptor.find(name: 'My Model') }
it 'returns 3 models' do
expect(result.count).to eq 3
end
it 'translates all to klass' do
expect(result.all? { |k| k.is_a? klass }).to be true
end
it 'gets them all' do
expect(result.map(&:other)).to eq ['0', '1', '2']
end
end
end
end
| 29.342105 | 156 | 0.577354 |
f879536183b999a95167ff55e562c98ac4ef34c2 | 2,382 | Rails.application.configure do
# Settings specified here will take precedence over those in config/application.rb.
# In the development environment your application's code is reloaded on
# every request. This slows down response time but is perfect for development
# since you don't have to restart the web server when you make code changes.
config.cache_classes = false
# Do not eager load code on boot.
#config.eager_load = false
#https://github.com/voormedia/rails-erd/issues/322#issuecomment-568368871
config.eager_load = ENV.fetch("EAGER_LOAD", false).present?
# Show full error reports.
config.consider_all_requests_local = true
# Enable/disable caching. By default caching is disabled.
# Run rails dev:cache to toggle caching.
if Rails.root.join('tmp', 'caching-dev.txt').exist?
config.action_controller.perform_caching = true
config.action_controller.enable_fragment_cache_logging = true
config.cache_store = :memory_store
config.public_file_server.headers = {
'Cache-Control' => "public, max-age=#{2.days.to_i}"
}
else
config.action_controller.perform_caching = false
config.cache_store = :null_store
end
# Store uploaded files on the local file system (see config/storage.yml for options).
config.active_storage.service = :local
# Don't care if the mailer can't send.
config.action_mailer.raise_delivery_errors = false
config.action_mailer.perform_caching = false
# Print deprecation notices to the Rails logger.
config.active_support.deprecation = :log
# Raise an error on page load if there are pending migrations.
config.active_record.migration_error = :page_load
# Highlight code that triggered database queries in logs.
config.active_record.verbose_query_logs = true
# Debug mode disables concatenation and preprocessing of assets.
# This option may cause significant delays in view rendering with a large
# number of complex assets.
config.assets.debug = true
# Suppress logger output for asset requests.
config.assets.quiet = true
# Raises error for missing translations.
# config.action_view.raise_on_missing_translations = true
# Use an evented file watcher to asynchronously detect changes in source code,
# routes, locales, etc. This feature depends on the listen gem.
config.file_watcher = ActiveSupport::EventedFileUpdateChecker
end
| 36.646154 | 87 | 0.764064 |
fffdc88b53811d431368c42af3cde2a3c8cd623b | 117 | class Donation < ActiveRecord::Base
belongs_to :user
validates :date, :presence => true, :valid_date => true
end
| 23.4 | 57 | 0.726496 |
382d8e8cdcb765233239f128b6b4f8ed15a63499 | 2,369 | class Flow < Sequel::Model
require 'mustermann'
many_to_one :site
many_to_one :store
many_to_one :media_store, class: :Store
def name
if post_kind
return post_kind.capitalize
else
return "Files"
end
end
def url_pattern
return Mustermann.new(url_template) # type: :sinatra
end
def path_pattern
return Mustermann.new(path_template) # type: :sinatra
end
def media_url_pattern
return Mustermann.new(media_url_template) # type: :sinatra
end
def media_path_pattern
return Mustermann.new(media_path_template) # type: :sinatra
end
def url_for_post(post)
begin
relative_url = url_pattern.expand(:ignore, post.render_variables)
return URI.join(site.url, relative_url).to_s
rescue => e
puts "#{e.message} #{e.backtrace.join("\n")}"
raise SitewriterError.new("template", "Unable to generate post url: #{e.message}", 500)
end
end
def file_path_for_post(post)
begin
return path_pattern.expand(:ignore, post.render_variables)
rescue => e
puts "#{e.message} #{e.backtrace.join("\n")}"
raise SitewriterError.new("template", "Unable to generate file path: #{e.message}", 500)
end
end
def file_content_for_post(post)
begin
return Mustache.render(content_template, post.render_variables).encode(universal_newline: true)
rescue => e
puts "#{e.message} #{e.backtrace.join("\n")}"
raise SitewriterError.new("template", "Unable to apply content template: #{e.message}", 500)
end
end
def store_post(post)
store.put(file_path_for_post(post), file_content_for_post(post), post_kind)
return url_for_post(post)
end
def url_for_media(media)
relative_url = media_url_pattern.expand(:ignore, media.render_variables)
return URI.join(site.url, relative_url).to_s
end
def file_path_for_media(media)
return media_path_pattern.expand(:ignore, media.render_variables)
end
def store_file(media)
media_store.upload(file_path_for_media(media), media.file, "file")
return url_for_media(media)
end
def attach_photo_url(post, url)
# TODO: allow alt text in hash for JSON (spec 3.3.2)
post.attach_url(:photo, url)
end
def attach_photo_media(post, media)
file_flow = site.file_flow
url = file_flow.store_file(media)
post.attach_url(:photo, url)
end
end
| 26.617978 | 101 | 0.704095 |
61bc5b499afae7a71d035297373723b93839c39c | 1,915 | module PaginationHelper
def self.get_self_link(collection, query)
self_query = query.clone
self_query = query.except(:page).clone
self_query['page[number]'] = collection.current_page
self_query['page[size]'] = collection.per_page
"#{ENV['APIGATEWAY_URL']}/v1/dashboard?#{self_query.to_query}"
end
def self.get_prev_link(collection, query)
current = collection.current_page
prev_query = query.except(:page).clone
prev_query['page[number]'] = current <= 1 ? 1 : current - 1
prev_query['page[size]'] = collection.per_page
"#{ENV['APIGATEWAY_URL']}/v1/dashboard?#{prev_query.to_query}"
end
def self.get_next_link(collection, query)
total = collection.total_pages
current = collection.current_page
next_query = query.except(:page).clone
next_query['page[number]'] = current >= total ? total : current + 1
next_query['page[size]'] = collection.per_page
"#{ENV['APIGATEWAY_URL']}/v1/dashboard?#{next_query.to_query}"
end
def self.get_first_link(collection, query)
first_query = query.except(:page).clone
first_query['page[number]'] = 1
first_query['page[size]'] = collection.per_page
"#{ENV['APIGATEWAY_URL']}/v1/dashboard?#{first_query.to_query}"
end
def self.get_last_link(collection, query)
last_query = query.except(:page).clone
last_query['page[number]'] = collection.total_pages
last_query['page[size]'] = collection.per_page
"#{ENV['APIGATEWAY_URL']}/v1/dashboard?#{last_query.to_query}"
end
def self.handmade_pagination_links(collection, params)
query = params.except(:controller, :action, :format, :loggedUser, :user).clone.permit!
{
self: get_self_link(collection, query),
prev: get_prev_link(collection, query),
next: get_next_link(collection, query),
first: get_first_link(collection, query),
last: get_last_link(collection, query),
}
end
end
| 36.826923 | 90 | 0.703394 |
916ded239915b14c9fc1f94961a2b904be188010 | 2,597 | # default[:apache_tomcat][:java_url]='http://download.oracle.com/otn-pub/java/jdk/7u79-b15/jdk-7u79-linux-x64.tar.gz'
default[:apache_tomcat][:java_url]='http://192.168.79.1/sources/jdk-7u79-linux-x64.tar.gz'
default[:apache_tomcat][:java_src]='jdk-7u79-linux-x64.tar.gz'
default[:apache_tomcat][:java_unfold]='jdk1.7.0_79'
###################################################
# default[:apache_tomcat][:tomcat_url]='http://archive.apache.org/dist/tomcat/tomcat-6/v6.0.36/bin/apache-tomcat-6.0.36.tar.gz'
default[:apache_tomcat][:tomcat_url]='http://192.168.79.1/sources/apache-tomcat-6.0.36.tar.gz'
default[:apache_tomcat][:tomcat_src]='apache-tomcat-6.0.36.tar.gz'
default[:apache_tomcat][:tomcat_unfold]='apache-tomcat-6.0.36'
############################################################
# default[:apache_tomcat][:apache_url]='http://archive.apache.org/dist/httpd/httpd-2.4.10.tar.gz'
default[:apache_tomcat][:apache_url]='http://192.168.79.1/sources/httpd-2.4.10.tar.gz'
default[:apache_tomcat][:apache_src]='httpd-2.4.10.tar.gz'
default[:apache_tomcat][:apache_unfold]='httpd-2.4.10'
default[:apache_tomcat][:apache_conf]='/usr/local/apache/conf/httpd.conf'
default[:apache_tomcat][:apache_mime]='/usr/local/apache/conf/mime.types'
default[:apache_tomcat][:apache_wwwroot]='/usr/local/apache/htdocs'
default[:apache_tomcat][:timezone]='UTC'
#####################################################
# default[:apache_tomcat][:apr_url]='http://archive.apache.org/dist/apr/apr-1.5.1.tar.gz'
default[:apache_tomcat][:apr_url]='http://192.168.79.1/sources/apr-1.5.1.tar.gz'
default[:apache_tomcat][:apr_src]='apr-1.5.1.tar.gz'
default[:apache_tomcat][:apr_unfold]='apr-1.5.1'
#####################################################
# default[:apache_tomcat][:aprutil_url]='http://archive.apache.org/dist/apr/apr-util-1.5.4.tar.gz'
default[:apache_tomcat][:aprutil_url]='http://192.168.79.1/sources/apr-util-1.5.4.tar.gz'
default[:apache_tomcat][:aprutil_src]='apr-util-1.5.4.tar.gz'
default[:apache_tomcat][:aprutil_unfold]='apr-util-1.5.4'
########################################################
#default[:apache_tomcat][:php_url]='http://php.net/distributions/php-5.5.14.tar.gz'
default[:apache_tomcat][:php_url]='http://192.168.79.1/sources/php-5.5.14.tar.gz'
default[:apache_tomcat][:php_src]='php-5.5.14.tar.gz'
default[:apache_tomcat][:php_unfold]='php-5.5.14'
########################################################
default[:apache_tomcat][:rhel6_url]='http://192.168.79.1/sources/rhel-server-6.5-x86_64-dvd.iso'
default[:apache_tomcat][:rhel6_iso]='rhel-server-6.5-x86_64-dvd.iso'
| 54.104167 | 127 | 0.648826 |
5d5c8bdb071ac255c5a2a7bbd88d66607b9534e4 | 802 | # frozen_string_literal: true
module GraphQL
class UnauthorizedFieldError < GraphQL::UnauthorizedError
# @return [Field] the field that failed the authorization check
attr_accessor :field
def initialize(message = nil, object: nil, type: nil, context: nil, field: nil)
if message.nil? && [field, type].any?(&:nil?)
raise ArgumentError, "#{self.class.name} requires either a message or keywords"
end
@field = field
message ||= begin
if object
"An instance of #{object.class} failed #{type.name}'s authorization check on field #{field.name}"
else
"Failed #{type.name}'s authorization check on field #{field.name}"
end
end
super(message, object: object, type: type, context: context)
end
end
end
| 33.416667 | 107 | 0.65212 |
5d0480a2d48f92de9eb37f45b1a2f3c2b60ad2a3 | 660 | namespace :migration do
desc "Usage: RAILS_ENV=production bundle exec rake migration:fill_dao_transactions_count_to_address"
task fill_dao_transactions_count_to_address: :environment do
progress_bar = ProgressBar.create({
total: Address.count,
format: "%e %B %p%% %c/%C"
})
values =
Address.all.map do |address|
progress_bar.increment
dao_transactions_count = address.ckb_dao_transactions.count
{ id: address.id, dao_transactions_count: dao_transactions_count, created_at: address.created_at, updated_at: Time.current }
end
Address.upsert_all(values) if values.present?
puts "done"
end
end
| 30 | 130 | 0.730303 |
ed26559326cfbda1db84e665df8f2ddd76ababdf | 3,610 | #
# Cookbook:: postgresql
# Resource:: postgresql_table
#
# Copyright:: 2013, OpenStreetMap Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
default_action :create
property :table, :kind_of => String, :name_property => true
property :cluster, :kind_of => String, :required => true
property :database, :kind_of => String, :required => true
property :schema, :kind_of => String, :default => "public"
property :owner, :kind_of => String, :required => [:create]
property :permissions, :kind_of => Hash, :default => {}
action :create do
if tables.include?(qualified_name)
if new_resource.owner != tables[qualified_name][:owner]
converge_by("set owner for #{new_resource} to #{new_resource.owner}") do
Chef::Log.info("Setting owner for #{new_resource} to #{new_resource.owner}")
cluster.execute(:command => "ALTER TABLE #{qualified_name} OWNER TO \"#{new_resource.owner}\"", :database => new_resource.database)
end
end
tables[qualified_name][:permissions].each_key do |user|
next if new_resource.permissions[user]
converge_by("revoke all for #{user} on #{new_resource}") do
Chef::Log.info("Revoking all for #{user} on #{new_resource}")
cluster.execute(:command => "REVOKE ALL ON #{qualified_name} FROM \"#{user}\"", :database => new_resource.database)
end
end
new_resource.permissions.each do |user, new_privileges|
current_privileges = tables[qualified_name][:permissions][user] || {}
new_privileges = Array(new_privileges)
if new_privileges.include?(:all)
new_privileges |= OpenStreetMap::PostgreSQL::TABLE_PRIVILEGES
end
OpenStreetMap::PostgreSQL::TABLE_PRIVILEGES.each do |privilege|
if new_privileges.include?(privilege)
unless current_privileges.include?(privilege)
converge_by("grant #{privilege} for #{user} on #{new_resource}") do
Chef::Log.info("Granting #{privilege} for #{user} on #{new_resource}")
cluster.execute(:command => "GRANT #{privilege.to_s.upcase} ON #{qualified_name} TO \"#{user}\"", :database => new_resource.database)
end
end
elsif current_privileges.include?(privilege)
converge_by("revoke #{privilege} for #{user} on #{new_resource}") do
Chef::Log.info("Revoking #{privilege} for #{user} on #{new_resource}")
cluster.execute(:command => "REVOKE #{privilege.to_s.upcase} ON #{qualified_name} FROM \"#{user}\"", :database => new_resource.database)
end
end
end
end
end
end
action :drop do
if tables.include?(qualified_name)
converge_by("drop #{new_resource}") do
Chef::Log.info("Dropping #{new_resource}")
cluster.execute(:command => "DROP TABLE #{qualified_name}", :database => new_resource.database)
end
end
end
action_class do
def cluster
@cluster ||= OpenStreetMap::PostgreSQL.new(new_resource.cluster)
end
def tables
@tables ||= cluster.tables(new_resource.database)
end
def qualified_name
"#{new_resource.schema}.#{new_resource.name}"
end
end
| 37.604167 | 148 | 0.684488 |
01b0e460961c7017e01ec2f755a8d2e70f628e91 | 3,897 | #-- encoding: UTF-8
#-- copyright
# OpenProject is an open source project management software.
# Copyright (C) 2012-2021 the OpenProject GmbH
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License version 3.
#
# OpenProject is a fork of ChiliProject, which is a fork of Redmine. The copyright follows:
# Copyright (C) 2006-2013 Jean-Philippe Lang
# Copyright (C) 2010-2013 the ChiliProject Team
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# See docs/COPYRIGHT.rdoc for more details.
#++
module Bim::Bcf::API::V2_1
class TopicsAPI < ::API::OpenProjectAPI
resources :topics do
helpers do
def topics
::Bim::Bcf::Issue.of_project(@project)
end
def transform_attributes(attributes)
wp_attributes = ::Bim::Bcf::Issues::TransformAttributesService
.new(@project)
.call(attributes)
.result
attributes
.slice(*::Bim::Bcf::Issue::SETTABLE_ATTRIBUTES)
.merge(wp_attributes)
end
# In a PUT request, every parameter that is neither required nor provided
# needs to be nilled. As we cannot nil type, status and priority,
# which are required for a work package, we use the default values.
def default_put_params
{
index: nil,
assigned_to: nil,
description: nil,
due_date: nil,
subject: nil,
type: @project.types.default.first,
status: Status.default,
priority: IssuePriority.default
}
end
end
after_validation do
authorize :view_linked_issues, context: @project
end
get &::Bim::Bcf::API::V2_1::Endpoints::Index
.new(model: Bim::Bcf::Issue,
api_name: 'Topics',
scope: -> { topics })
.mount
post &::Bim::Bcf::API::V2_1::Endpoints::Create
.new(model: Bim::Bcf::Issue,
api_name: 'Topics',
params_modifier: ->(attributes) {
transform_attributes(attributes)
.merge(project: @project)
})
.mount
route_param :topic_uuid, regexp: /\A[a-f0-9\-]+\z/ do
after_validation do
@issue = topics.find_by_uuid!(params[:topic_uuid])
end
get &::Bim::Bcf::API::V2_1::Endpoints::Show
.new(model: Bim::Bcf::Issue,
api_name: 'Topics')
.mount
put &::Bim::Bcf::API::V2_1::Endpoints::Update
.new(model: Bim::Bcf::Issue,
api_name: 'Topics',
params_modifier: ->(attributes) {
transform_attributes(attributes)
.reverse_merge(default_put_params)
})
.mount
delete &::Bim::Bcf::API::V2_1::Endpoints::Delete
.new(model: Bim::Bcf::Issue,
api_name: 'Topics')
.mount
mount ::Bim::Bcf::API::V2_1::Viewpoints::API
end
end
end
end
| 33.886957 | 91 | 0.579933 |
f7a94b359b20ca7fb3a958e23e4e7bedaf764c5b | 7,859 | # encoding: utf-8
require 'pathspec/regexspec'
class GitIgnoreSpec < RegexSpec
attr_reader :regex
def initialize(pattern)
pattern = pattern.strip unless pattern.nil?
# A pattern starting with a hash ('#') serves as a comment
# (neither includes nor excludes files). Escape the hash with a
# back-slash to match a literal hash (i.e., '\#').
if pattern.start_with?('#')
@regex = nil
@inclusive = nil
# A blank pattern is a null-operation (neither includes nor
# excludes files).
elsif pattern.empty?
@regex = nil
@inclusive = nil
# Patterns containing three or more consecutive stars are invalid and
# will be ignored.
elsif pattern =~ /\*\*\*+/
@regex = nil
@inclusive = nil
# We have a valid pattern!
else
# A pattern starting with an exclamation mark ('!') negates the
# pattern (exclude instead of include). Escape the exclamation
# mark with a back-slash to match a literal exclamation mark
# (i.e., '\!').
if pattern.start_with?('!')
@inclusive = false
# Remove leading exclamation mark.
pattern = pattern[1..-1]
else
@inclusive = true
end
# Remove leading back-slash escape for escaped hash ('#') or
# exclamation mark ('!').
if pattern.start_with?('\\')
pattern = pattern[1..-1]
end
# Split pattern into segments. -1 to allow trailing slashes.
pattern_segs = pattern.split('/', -1)
# Normalize pattern to make processing easier.
# A pattern beginning with a slash ('/') will only match paths
# directly on the root directory instead of any descendant
# paths. So, remove empty first segment to make pattern relative
# to root.
if pattern_segs[0].empty?
pattern_segs.shift
else
# A pattern without a beginning slash ('/') will match any
# descendant path. This is equivalent to "**/{pattern}". So,
# prepend with double-asterisks to make pattern relative to
# root.
if pattern_segs.length == 1 && pattern_segs[0] != '**'
pattern_segs.insert(0, '**')
end
end
# A pattern ending with a slash ('/') will match all descendant
# paths if it is a directory but not if it is a regular file.
# This is equivalent to "{pattern}/**". So, set last segment to
# double asterisks to include all descendants.
if pattern_segs[-1].empty?
pattern_segs[-1] = '**'
end
# Handle platforms with backslash separated paths
if File::SEPARATOR == '\\'
path_sep = '\\\\'
else
path_sep = '/'
end
# Build regular expression from pattern.
regex = '^'
need_slash = false
regex_end = pattern_segs.size - 1
pattern_segs.each_index do |i|
seg = pattern_segs[i]
if seg == '**'
# A pattern consisting solely of double-asterisks ('**')
# will match every path.
if i == 0 && i == regex_end
regex.concat('.+')
# A normalized pattern beginning with double-asterisks
# ('**') will match any leading path segments.
elsif i == 0
regex.concat("(?:.+#{path_sep})?")
need_slash = false
# A normalized pattern ending with double-asterisks ('**')
# will match any trailing path segments.
elsif i == regex_end
regex.concat("#{path_sep}.*")
# A pattern with inner double-asterisks ('**') will match
# multiple (or zero) inner path segments.
else
regex.concat("(?:#{path_sep}.+)?")
need_slash = true
end
# Match single path segment.
elsif seg == '*'
if need_slash
regex.concat(path_sep)
end
regex.concat("[^#{path_sep}]+")
need_slash = true
else
# Match segment glob pattern.
if need_slash
regex.concat(path_sep)
end
regex.concat(translate_segment_glob(seg))
need_slash = true
end
end
regex.concat('$')
super(regex)
end
end
def match(path)
super(path)
end
# Translates the glob pattern to a regular expression. This is used in
# the constructor to translate a path segment glob pattern to its
# corresponding regular expression.
#
# *pattern* (``str``) is the glob pattern.
#
# Returns the regular expression (``str``).
def translate_segment_glob(pattern)
# NOTE: This is derived from `fnmatch.translate()` and is similar to
# the POSIX function `fnmatch()` with the `FNM_PATHNAME` flag set.
escape = false
regex = ''
i = 0
while i < pattern.size
# Get next character.
char = pattern[i].chr
i += 1
# Escape the character.
if escape
escape = false
regex += Regexp.escape(char)
# Escape character, escape next character.
elsif char == '\\'
escape = true
# Multi-character wildcard. Match any string (except slashes),
# including an empty string.
elsif char == '*'
regex += '[^/]*'
# Single-character wildcard. Match any single character (except
# a slash).
elsif char == '?'
regex += '[^/]'
# Bracket expression wildcard. Except for the beginning
# exclamation mark, the whole bracket expression can be used
# directly as regex but we have to find where the expression
# ends.
# - "[][!]" matches ']', '[' and '!'.
# - "[]-]" matches ']' and '-'.
# - "[!]a-]" matches any character except ']', 'a' and '-'.
elsif char == '['
j = i
# Pass bracket expression negation.
if j < pattern.size && pattern[j].chr == '!'
j += 1
end
# Pass first closing bracket if it is at the beginning of the
# expression.
if j < pattern.size && pattern[j].chr == ']'
j += 1
end
# Find closing bracket. Stop once we reach the end or find it.
while j < pattern.size && pattern[j].chr != ']'
j += 1
end
if j < pattern.size
expr = '['
# Bracket expression needs to be negated.
if pattern[i].chr == '!'
expr += '^'
i += 1
# POSIX declares that the regex bracket expression negation
# "[^...]" is undefined in a glob pattern. Python's
# `fnmatch.translate()` escapes the caret ('^') as a
# literal. To maintain consistency with undefined behavior,
# I am escaping the '^' as well.
elsif pattern[i].chr == '^'
expr += '\\^'
i += 1
end
# Escape brackets contained within pattern
if pattern[i].chr == ']' && i != j
expr += '\]'
i += 1
end
# Build regex bracket expression. Escape slashes so they are
# treated as literal slashes by regex as defined by POSIX.
expr += pattern[i..j].sub('\\', '\\\\')
# Add regex bracket expression to regex result.
regex += expr
# Found end of bracket expression. Increment j to be one past
# the closing bracket:
#
# [...]
# ^ ^
# i j
#
j += 1
# Set i to one past the closing bracket.
i = j
# Failed to find closing bracket, treat opening bracket as a
# bracket literal instead of as an expression.
else
regex += '\['
end
# Regular character, escape it for regex.
else
regex << Regexp.escape(char)
end
end
regex
end
def inclusive?
@inclusive
end
end
| 28.474638 | 73 | 0.550833 |
f75ff6fbede2d282ab5f06196c6b984195b037d2 | 815 | # frozen_string_literal: true
module Yabber
# Base Message validation
module Validation
include Constants
def validate_topic(topic)
raise ArgumentError, "Invalid Topic: #{topic}. (#{TOPICS})" unless valid_topic?(topic)
end
def validate_type(type)
raise ArgumentError, "Invalid Type: #{type}. (#{TYPES})" unless valid_type?(type)
end
def validate_version(version)
raise ArgumentError, "Invalid Version: #{version}." unless valid_version?(version)
end
private
def valid_version?(version)
return false unless version.is_a?(Integer)
return false unless version.nonzero?
true
end
def valid_topic?(topic)
TOPICS.one? { |t| t == topic }
end
def valid_type?(type)
TYPES.one? { |t| t == type }
end
end
end
| 22.027027 | 92 | 0.655215 |
01a1e1dfc2255ce05e3d822b671a692b52222f7b | 16,919 | #!/usr/bin/env ruby
# -------------------------------------------------------------------------- #
# Copyright 2002-2020, OpenNebula Project, OpenNebula Systems #
# #
# Licensed under the Apache License, Version 2.0 (the "License"); you may #
# not use this file except in compliance with the License. You may obtain #
# a copy of the License at #
# #
# http://www.apache.org/licenses/LICENSE-2.0 #
# #
# Unless required by applicable law or agreed to in writing, software #
# distributed under the License is distributed on an "AS IS" BASIS, #
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. #
# See the License for the specific language governing permissions and #
# limitations under the License. #
#--------------------------------------------------------------------------- #
require 'rexml/document'
require 'base64'
require 'fileutils'
require 'erb'
class Router
# Default files
FILES = {
:resolv_conf => "/etc/resolv.conf",
:context => "/mnt/context/context.sh",
:dnsmasq_conf => "/etc/dnsmasq.conf",
:radvd_conf => "/etc/radvd.conf",
:log_file => "/var/log/router.log",
:authorized_keys => "/root/.ssh/authorized_keys"
}
# Default MAC prefix
MAC_PREFIX = "02:00"
# Default gateway (last byte)
DEFAULT_GW = "1"
# Default netmask
DEFAULT_NETMASK = "255.255.255.0"
# Context parameters that are base64 encoded
BASE_64_KEYS = [:privnet, :pubnet, :template, :root_password]
# Context parameters that are XML documents
XML_KEYS = [:privnet, :pubnet, :template]
# The specification on how to fetch these attributes.
# The order in the array matters, the first non-empty one is returned.
ATTRIBUTES = {
:dns => [
{
:resource => :context
},
{
:resource => :pubnet,
:resource_xpath => 'TEMPLATE/DNS'
}
],
:search => [
{
:resource => :context
},
{
:resource => :pubnet,
:resource_xpath => 'TEMPLATE/SEARCH'
}
],
:nat => [
{
:resource => :context,
:resource_name => :forwarding
},
{
:resource => :privnet,
:resource_xpath => 'TEMPLATE/FORWARDING'
}
],
:dhcp => [
{
:resource => :context
},
{
:resource => :privnet,
:resource_xpath => 'TEMPLATE/DHCP'
}
],
:radvd => [
{
:resource => :context
},
{
:resource => :privnet,
:resource_xpath => 'TEMPLATE/RADVD'
}
],
:ntp_server => [
{
:resource => :context
},
{
:resource => :privnet,
:resource_xpath => 'TEMPLATE/NTP_SERVER'
}
],
:ssh_public_key => [
{
:resource => :context
}
],
:root_pubkey => [
{
:resource => :context
}
],
:root_password => [
{
:resource => :context
}
]
}
def initialize
mount_context
if (@context = read_context)
unpack
get_network_information
end
end
############################################################################
# GETTERS
############################################################################
def pubnet
!@pubnet[:network_id].nil?
end
def privnet
!@privnet[:network_id].nil?
end
def dns
dns_raw = get_attribute(:dns)
dns_raw.split if !dns_raw.nil?
end
def search
get_attribute(:search)
end
def root_password
get_attribute(:root_password)
end
def root_pubkey
get_attribute(:ssh_public_key) || get_attribute(:root_pubkey)
end
def nat
nat_raw = get_attribute(:nat)
nat_raw.split if !nat_raw.nil?
end
def dhcp
dhcp_raw = get_attribute(:dhcp) || ""
if dhcp_raw.downcase.match(/^y(es)?$/)
true
else
false
end
end
def ntp_server
get_attribute(:ntp_server)
end
def radvd
radvd_raw = get_attribute(:radvd) || ""
radvd = !!radvd_raw.downcase.match(/^y(es)?$/)
radvd and @privnet[:ipv6]
end
############################################################################
# ACTIONS
############################################################################
def mount_context
log("mounting context")
FileUtils.mkdir_p("/mnt/context")
run "mount -t iso9660 /dev/cdrom /mnt/context 2>/dev/null"
end
def write_resolv_conf
if dns
File.open(FILES[:resolv_conf], 'w') do |resolv_conf|
if search
resolv_conf.puts "search #{search}"
end
dns.each do |nameserver|
resolv_conf.puts "nameserver #{nameserver}"
end
end
elsif search
File.open(FILES[:resolv_conf], 'a') do |resolv_conf|
resolv_conf.puts "search #{search}"
end
end
end
def configure_network
if pubnet
ip = @pubnet[:ip]
ip6_global = @pubnet[:ip6_global]
ip6_site = @pubnet[:ip6_site]
nic = @pubnet[:interface]
netmask = @pubnet[:netmask]
gateway = @pubnet[:gateway]
run "ip link set #{nic} up"
run "ip addr add #{ip}/#{netmask} dev #{nic}"
run "ip addr add #{ip6_global} dev #{nic}" if ip6_global
run "ip addr add #{ip6_site} dev #{nic}" if ip6_site
run "ip route add default via #{gateway}"
end
if privnet
ip = @privnet[:ip]
ip6_global = @privnet[:ip6_global]
ip6_site = @privnet[:ip6_site]
nic = @privnet[:interface]
netmask = @privnet[:netmask]
run "ip link set #{nic} up"
run "ip addr add #{ip}/#{netmask} dev #{nic}"
run "ip addr add #{ip6_global} dev #{nic}" if ip6_global
run "ip addr add #{ip6_site} dev #{nic}" if ip6_site
end
end
def configure_dnsmasq
File.open(FILES[:dnsmasq_conf],'w') do |conf|
dhcp_ip_mac_pairs.collect do |ar|
conf.puts "dhcp-range=#{ar[:ip_start]},#{ar[:ip_end]},infinite"
end
conf.puts "dhcp-option=42,#{ntp_server} # ntp server" if ntp_server
conf.puts "dhcp-option=4,#{@privnet[:ip]} # name server"
dhcp_ip_mac_pairs.each do |ar|
ar[:mac_ip_pairs].each do |mac,ip,_|
conf.puts "dhcp-host=#{mac},#{ip}"
end
end
end
end
def configure_nat
nat.each do |nat_rule|
nat_rule = nat_rule.split(":")
if nat_rule.length == 2
ip, inport = nat_rule
outport = inport
elsif nat_rule.length == 3
outport, ip, inport = nat_rule
end
run "iptables -t nat -A PREROUTING -p tcp --dport #{outport} " \
"-j DNAT --to-destination #{ip}:#{inport}"
end
end
def configure_radvd
prefixes = [@privnet[:ip6_global],@privnet[:ip6_site]].compact
privnet_iface = @privnet[:interface]
radvd_conf_tpl =<<-EOF.gsub(/^\s{12}/,"")
interface <%= privnet_iface %>
{
AdvSendAdvert on;
<% prefixes.each do |p| %>
prefix <%= p %>/64
{
AdvOnLink on;
};
<% end %>
};
EOF
radvd_conf = ERB.new(radvd_conf_tpl).result(binding)
File.open(FILES[:radvd_conf],'w') {|c| c.puts radvd_conf }
end
def configure_masquerade
run "iptables -t nat -A POSTROUTING -o eth0 -j MASQUERADE"
end
def configure_ip_forward
run "sysctl -w net.ipv4.ip_forward=1"
end
def configure_root_password
run "echo -n 'root:#{root_password}'|chpasswd -e"
end
def configure_root_pubkey
FileUtils.mkdir_p(File.dirname(FILES[:authorized_keys]),:mode => 0700)
File.open(FILES[:authorized_keys], "a", 0600) do |f|
f.write(root_pubkey)
end
end
def service(service, action = :start)
action = action.to_s
run "/etc/init.d/#{service} #{action}"
end
def log(msg, command = false)
msg = "=> #{msg}" unless command
File.open(FILES[:log_file],'a') {|f| f.puts msg}
end
def has_context?
!!@context
end
############################################################################
# Private methods
############################################################################
private
def get_network_information
@pubnet = Hash.new
@privnet = Hash.new
@mac_interfaces = Hash[
Dir["/sys/class/net/*/address"].collect do |f|
[ File.read(f).strip, f.split('/')[4] ]
end
]
if (pubnet_id = get_element_xpath(:pubnet, 'ID'))
@pubnet[:network_id] = pubnet_id
xpath_ip = "TEMPLATE/NIC[NETWORK_ID='#{pubnet_id}']/IP"
xpath_ip6_global = "TEMPLATE/NIC[NETWORK_ID='#{pubnet_id}']/IP6_GLOBAL"
xpath_ip6_site = "TEMPLATE/NIC[NETWORK_ID='#{pubnet_id}']/IP6_ULA"
xpath_mac = "TEMPLATE/NIC[NETWORK_ID='#{pubnet_id}']/MAC"
@pubnet[:ip] = get_element_xpath(:template, xpath_ip)
@pubnet[:ip6_global] = get_element_xpath(:template, xpath_ip6_global)
@pubnet[:ip6_site] = get_element_xpath(:template, xpath_ip6_site)
@pubnet[:mac] = get_element_xpath(:template, xpath_mac)
@pubnet[:ipv6] = true if @pubnet[:ip6_global] or @pubnet[:ip6_site]
@pubnet[:interface] = @mac_interfaces[@pubnet[:mac]]
netmask = get_element_xpath(:pubnet, 'TEMPLATE/NETWORK_MASK')
@pubnet[:netmask] = netmask || DEFAULT_NETMASK
gateway = get_element_xpath(:pubnet, 'TEMPLATE/GATEWAY')
if gateway.nil?
gateway = @pubnet[:ip].gsub(/\.\d{1,3}$/,".#{DEFAULT_GW}")
end
@pubnet[:gateway] = gateway
end
if (privnet_id = get_element_xpath(:privnet, 'ID'))
@privnet[:network_id] = privnet_id
xpath_ip = "TEMPLATE/NIC[NETWORK_ID='#{privnet_id}']/IP"
xpath_ip6_global = "TEMPLATE/NIC[NETWORK_ID='#{privnet_id}']/IP6_GLOBAL"
xpath_ip6_site = "TEMPLATE/NIC[NETWORK_ID='#{privnet_id}']/IP6_ULA"
xpath_mac = "TEMPLATE/NIC[NETWORK_ID='#{privnet_id}']/MAC"
@privnet[:ip] = get_element_xpath(:template, xpath_ip)
@privnet[:ip6_global] = get_element_xpath(:template, xpath_ip6_global)
@privnet[:ip6_site] = get_element_xpath(:template, xpath_ip6_site)
@privnet[:mac] = get_element_xpath(:template, xpath_mac)
@privnet[:ipv6] = true if @privnet[:ip6_global] or @privnet[:ip6_site]
@privnet[:interface] = @mac_interfaces[@privnet[:mac]]
netmask = get_element_xpath(:privnet, 'TEMPLATE/NETWORK_MASK')
@privnet[:netmask] = netmask || DEFAULT_NETMASK
end
end
def run(cmd)
log(cmd, true)
output = `#{cmd} 2>&1`
exitstatus = $?.exitstatus
log(output) if !output.empty?
log("ERROR: exit code #{exitstatus}") if exitstatus != 0
end
def dhcp_ip_mac_pairs
netxml = @xml[:privnet]
pairs = Array.new
netxml.elements.each('AR_POOL/AR') do |ar|
mac_ip_pairs = Array.new
ip_start = ar.elements['IP'].text
size = ar.elements['SIZE'].text.to_i
ip_start_int = ip_to_int(ip_start)
ip_end_int = ip_start_int + size
ip_end = int_to_ip(ip_end_int)
(ip_start_int..ip_end_int).each do |int_ip|
ip = int_to_ip(int_ip)
mac = ip2mac(ip)
# skip this IP if it's already taken
next if ar.elements["LEASES/LEASE[IP='#{ip}']"]
mac_ip_pairs << [mac, ip]
end
pairs << {
:ip_start => ip_start,
:ip_end => ip_end,
:size => size,
:mac_ip_pairs => mac_ip_pairs
}
end
pairs
end
def ip_to_int(ip)
num = 0
ip.split(".").each{|i| num *= 256; num = num + i.to_i}
num
end
def int_to_ip(num)
ip = Array.new
(0..3).reverse_each do |i|
ip << (((num>>(8*i)) & 0xff))
end
ip.join('.')
end
def ip2mac(ip)
mac = MAC_PREFIX + ':' \
+ ip.split('.').collect{|i| sprintf("%02X",i)}.join(':')
mac.downcase
end
def mac2ip(mac)
mac.split(':')[2..-1].collect{|i| i.to_i(16)}.join('.')
end
def unpack
@xml = Hash.new
BASE_64_KEYS.each do |key|
if @context.include? key
@context[key] = Base64::decode64(@context[key])
end
end
XML_KEYS.each do |key|
if @context.include? key
@xml[key] = REXML::Document.new(@context[key]).root
end
end
end
def get_attribute(name)
order = ATTRIBUTES[name]
return nil if order.nil?
order.each do |e|
if e[:resource] != :context
resource = e[:resource]
xpath = e[:resource_xpath]
value = get_element_xpath(resource, xpath)
return value if !value.nil?
else
if e[:resource_name]
resource_name = e[:resource_name]
else
resource_name = name
end
element = @context[resource_name]
return element if !element.nil?
end
end
return nil
end
def get_element_xpath(resource, xpath)
xml_resource = @xml[resource]
return nil if xml_resource.nil?
element = xml_resource.elements[xpath]
return element.text.to_s if !element.nil?
end
def read_context
return nil unless File.exist?(FILES[:context])
context = Hash.new
context_file = File.read(FILES[:context])
context_file.each_line do |line|
next if line.match(/^#/)
if (m = line.match(/^(.*?)='(.*)'$/))
key = m[1].downcase.to_sym
value = m[2]
context[key] = value
end
end
context
end
end
router = Router.new
if !router.has_context?
router.log("ERROR: Context not found. Stopping.")
exit 1
end
router.log("configure network")
router.configure_network
if router.pubnet
if router.dns || router.search
router.log("write resolv.conf")
router.write_resolv_conf
end
# Set masquerade
router.log("set masquerade")
router.configure_masquerade
# Set ipv4 forward
router.log("ip forward")
router.configure_ip_forward
# Set NAT rules
if router.nat
router.log("configure nat")
router.configure_nat
end
end
if router.privnet and router.dhcp
router.log("configure dnsmasq")
router.configure_dnsmasq
router.service("dnsmasq")
end
if router.radvd
router.log("configure radvd")
router.configure_radvd
router.service("radvd")
end
if router.root_password
router.log("configure root password")
router.configure_root_password
end
if router.root_pubkey
router.log("configure root pubkey")
router.configure_root_pubkey
end
| 28.531197 | 84 | 0.48874 |
28ea171973614cf18f9d9362e43c0cabba3cd86f | 2,732 | module Cubits
class Callback
#
# Processes callback request parsed into separate params
# and instantiates a resource object on success.
#
# @param params [Hash]
# @param params[:cubits_callback_id] [String] Value of the CUBITS_CALLBACK_ID header
# @param params[:cubits_key] [String] Value of the CUBITS_KEY header
# @param params[:cubits_signature] [String] Value of the CUBITS_SIGNATURE header
# @param params[:body] [String] Request body
# @param params[:resource_class] [Resource,nil] (optional) Instantiate a Resource based object (default: nil)
# and initialize it with parsed request body. If not specified, returns parsed body as a plain Hash
# @param params[:allow_insecure] [Boolean] (optional) Allow insecure, unsigned callbacks (default: false)
#
# @return [Resource,Hash]
#
# @raise [InvalidSignature]
# @raise [InsecureCallback]
#
def self.from_params(params = {})
result = from_params_to_hash(params)
params[:resource_class] ? params[:resource_class].new(result) : result
end
private
def self.from_params_to_hash(params)
validate_params!(params)
if params[:cubits_signature] && !params[:cubits_signature].empty?
validate_signature!(params)
elsif !params[:allow_insecure]
fail InsecureCallback, 'Refusing to process an unsigned callback for security reasons'
end
JSON.parse(params[:body])
end
def self.validate_params!(params)
unless params[:cubits_callback_id].is_a?(String)
fail ArgumentError, 'String is expected as :cubits_callback_id'
end
if params[:cubits_key] && !params[:cubits_key].is_a?(String)
fail ArgumentError, 'String is expected as :cubits_key'
end
if params[:cubits_signature] && !params[:cubits_signature].is_a?(String)
fail ArgumentError, 'String is expected as :cubits_signature'
end
fail ArgumentError, 'String is expected as :body' unless params[:body].is_a?(String)
if params[:resource_class]
unless params[:resource_class].is_a?(Class) && params[:resource_class] < Resource
fail ArgumentError, 'Resource based class is expected as :resource_class'
end
end
true
end
def self.validate_signature!(params)
connection = Cubits.connection(params[:cubits_key])
msg = params[:cubits_callback_id] + OpenSSL::Digest::SHA256.hexdigest(params[:body])
unless connection.sign_message(msg) == params[:cubits_signature]
fail InvalidSignature, 'Callback signature is invalid'
end
true
rescue ConnectionError => e
raise InvalidSignature, e.message
end
end # class Callback
end
| 39.594203 | 113 | 0.691801 |
bf2018fa7644a9cdef044491c4404e838a678c40 | 993 | lib = File.expand_path("lib", __dir__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require "simple_dsl_parser/version"
Gem::Specification.new do |spec|
spec.name = "simple_dsl_parser"
spec.version = SimpleDslParser::VERSION
spec.authors = ["xiongzenghui"]
spec.email = ["[email protected]"]
spec.summary = 'a simple DSL Parser'
spec.description = 'a simple DSL Parser'
spec.homepage = "https://github.com/xzhhe/simple_dsl_parser"
spec.license = "MIT"
spec.files = Dir.chdir(File.expand_path('..', __FILE__)) do
`git ls-files -z`.split("\x0").reject { |f| f.match(%r{^(test|spec|features)/}) }
end
spec.bindir = "exe"
spec.executables = spec.files.grep(%r{^exe/}) { |f| File.basename(f) }
spec.require_paths = ["lib"]
spec.add_development_dependency "bundler", "~> 2.0"
spec.add_development_dependency "rake", "~> 10.0"
spec.add_development_dependency "rspec", "~> 3.0"
end
| 36.777778 | 85 | 0.64854 |
87cdf0483636aac72fb9794c2df1c3814d3e7349 | 556 | # frozen_string_literal: true
require 'rails_helper'
RSpec.describe ProductStock, type: :model do
let(:product_stock) { build(:product_stock) }
it 'should have a product id' do
product_stock.product_id = nil
expect(product_stock).to_not be_valid
end
it 'should have current_stock' do
product_stock.current_stock = nil
expect(product_stock).to_not be_valid
end
it 'should belong to a product' do
product_stock = ProductStock.reflect_on_association(:product)
expect(product_stock.macro).to eq(:belongs_to)
end
end
| 24.173913 | 65 | 0.75 |
28cb7ffa22077e118a0bf61c9bcf8ab83594a175 | 886 | require File.expand_path('../boot', __FILE__)
require 'rails/all'
Bundler.require(*Rails.groups)
require "address_book"
module Dummy
class Application < Rails::Application
# Settings in config/environments/* take precedence over those specified here.
# Application configuration should go into files in config/initializers
# -- all .rb files in that directory are automatically loaded.
# Set Time.zone default to the specified zone and make Active Record auto-convert to this zone.
# Run "rake -D time" for a list of tasks for finding time zone names. Default is UTC.
# config.time_zone = 'Central Time (US & Canada)'
# The default locale is :en and all translations from config/locales/*.rb,yml are auto loaded.
# config.i18n.load_path += Dir[Rails.root.join('my', 'locales', '*.{rb,yml}').to_s]
# config.i18n.default_locale = :de
end
end
| 36.916667 | 99 | 0.72009 |
e948b7f2de16eb216c14ffb5e1eaff3407607372 | 905 | module DeclarativePolicy
# The DSL evaluation context inside rule { ... } blocks.
# Responsible for creating and combining Rule objects.
#
# See Base.rule
class RuleDsl
def initialize(context_class)
@context_class = context_class
end
def can?(ability)
Rule::Ability.new(ability)
end
def all?(*rules)
Rule::And.make(rules)
end
def any?(*rules)
Rule::Or.make(rules)
end
def none?(*rules)
~Rule::Or.new(rules)
end
def cond(condition)
Rule::Condition.new(condition)
end
def delegate(delegate_name, condition)
Rule::DelegatedCondition.new(delegate_name, condition)
end
def method_missing(m, *a, &b)
return super unless a.empty? && !block_given?
if @context_class.delegations.key?(m)
DelegateDsl.new(self, m)
else
cond(m.to_sym)
end
end
end
end
| 19.673913 | 60 | 0.626519 |
01cd4aaf5bf54f2854fc1b105a352a5c52fe4655 | 1,127 | # This file is auto-generated from the current state of the database. Instead
# of editing this file, please use the migrations feature of Active Record to
# incrementally modify your database, and then regenerate this schema definition.
#
# Note that this schema.rb definition is the authoritative source for your
# database schema. If you need to create the application database on another
# system, you should be using db:schema:load, not running all the migrations
# from scratch. The latter is a flawed and unsustainable approach (the more migrations
# you'll amass, the slower it'll run and the greater likelihood for issues).
#
# It's strongly recommended that you check this file into your version control system.
ActiveRecord::Schema.define(version: 20190417064047) do
create_table "users", force: :cascade do |t|
t.string "name"
t.string "email"
t.datetime "created_at", null: false
t.datetime "updated_at", null: false
t.string "password_digest"
t.string "remember_digest"
t.boolean "admin", default: false
t.index ["email"], name: "index_users_on_email", unique: true
end
end
| 41.740741 | 86 | 0.755102 |
1d2d14fc0ff1375cfc9dd6237d31ebb53b379947 | 82 | Rails.application.routes.draw do
mount Fundraiser::Engine => "/fundraiser"
end
| 16.4 | 43 | 0.756098 |
038b31d2b2e8d0c225ba473f4676aecc1532dcd1 | 203 | Eson::Search::BaseQuery.new do
query do
term :tag => "something"
end
filter do |f|
range :post_date, {:from => "2010-03-01", :to => "2010-04-01"}
prefix "name.second" => "ba"
end
end
| 20.3 | 66 | 0.586207 |
e89ef3d917c18527082ad0f2bcad7fbacb4873e5 | 123 | require 'test_helper'
class TimeEventTest < ActiveSupport::TestCase
# test "the truth" do
# assert true
# end
end
| 15.375 | 45 | 0.707317 |
08b5a3383fbf33ac8a8e8f3fb45ed78ba76ddd98 | 1,558 | # frozen_string_literal: true
require_relative 'test_reporter'
class TestReporterPublicStartStop < TestReporter
# This class extends TestReporter so it includes all of the tests from
# TestReporter plus any additional test_* cases below and it
# overrides create_report to use the start/stop methods
# This specifically tests the public API of the start and stop methods of the
# MemoryProfiler module itself, and even does some extra tests exercising
# edge case handling of `current_reporter` which is done in those methods.
#
# When something fails here, and not in the private api tests, then there is
# something wrong specifically in the methods handling the `current_reporter`
# that needs to be fixed.
def create_report(options={}, &profiled_block)
profiled_block ||= -> { default_block }
MemoryProfiler.start(options)
profiled_block.call rescue nil
MemoryProfiler.stop
end
def test_module_stop_with_no_start
results = MemoryProfiler.stop
assert_nil(results)
end
def test_module_double_start
MemoryProfiler.start
reporter = MemoryProfiler::Reporter.current_reporter
MemoryProfiler.start
same_reporter = MemoryProfiler::Reporter.current_reporter
default_block
results = MemoryProfiler.stop
assert_equal(reporter, same_reporter)
# Some extra allocations here due to the variables needed in the test above
assert_equal(17, results.total_allocated)
end
def test_exception_handling
# This overrides and skips exception handling from the base TestReporter
end
end
| 33.148936 | 79 | 0.771502 |
21e0c970d688399c4cbb969612db4f44fe41be1c | 1,350 | module Grammar
class Ngram
include Grammar::LanguageHelper
attr_accessor :contents
def initialize(grams=[])
self.contents = grams
end
def omnigrams
self.contents.map{|gram| gram.is_a?(Array) && gram.flatten || nil}.compact
end
def with_leading(matches, args={})
phrase_matches = omnigrams.reject{|h| h.flatten.is_a? String}.inject([]) { |a, gram| a << gram if ([gram.flatten[0]] & matches).present?; a}
matches = Search::Ngram.new((phrase_matches).compact.select(&:present?).uniq)
matches = matches.minus(IDENTIFIERS) if args[:object_only]
matches = matches.to_a.flatten.uniq if args[:flatten]
matches
end
def with(matches)
phrase_matches = omnigrams.select{|h| h.flatten.is_a? Array}.inject([]) { |a, gram| a << gram if (gram.flatten & matches).present?; a}
Search::Ngram.new((phrase_matches).compact.select(&:present?).uniq)
end
def without(matches)
phrase_matches = omnigrams.select{|h| h.flatten.is_a? Array}.inject([]) { |a, gram| a << gram if (gram.flatten & matches).empty?; a}
Search::Ngram.new((phrase_matches).compact.select(&:present?).uniq)
end
def minus(matches)
with(matches).to_a.map{|gram| gram - matches}.compact.select(&:present?).uniq
end
def to_a
self.contents
end
end
end
| 30 | 146 | 0.651111 |
1c283e85f6219ade83d753b514a07c1254050fbb | 10,721 | require_relative '../SMatrix'
require 'test/unit'
require_relative '../storage/yale'
require_relative '../storage/dok'
require_relative '../storage/lil'
require 'complex'
require 'matrix'
class TestConditionals<Test::Unit::TestCase
# this is to test all conditionals when comparing different SMatrices
# NOTE: may need to create dummy stubs, similar to the previous class
# For these test cases, the data doesn't need to be random.
# probably just use extreme point analysis for the data points, etc.
def test_identity
#setup
yaleMatrix = SMatrix.new(Yale.new(3, 3))
dokMatrix = SMatrix.new(Dok.new(3, 3), :dok)
lilMatrix = SMatrix.new(Lil.new(3, 3), :lil)
for i in 0..2 do
for j in 0..2 do
if i == j
value = 1
else
value = 0
end
yaleMatrix[i, j] = value
dokMatrix[i, j] = value
lilMatrix[i, j] = value
end
end
#Ensure all equal the 3x3 identity matrix
assert(yaleMatrix.identity?)
assert(dokMatrix.identity?)
assert(lilMatrix.identity?)
#Take away a different 1 from each
yaleMatrix[0,0] = 0
dokMatrix[1,1] = 0
lilMatrix[2,2] = 0
assert(!yaleMatrix.identity?)
assert(!dokMatrix.identity?)
assert(!lilMatrix.identity?)
#Add one back, and an extra 1 elsewhere
yaleMatrix[0,0] = 1
yaleMatrix[0,1] = 1
dokMatrix[1,1] = 1
dokMatrix[1,2] = 1
lilMatrix[2,2] = 1
lilMatrix[2,0] = 1
assert(!yaleMatrix.identity?)
assert(!dokMatrix.identity?)
assert(!lilMatrix.identity?)
end
def test_zero
#setup
yaleMatrix = SMatrix.new(Yale.new(3, 3))
dokMatrix = SMatrix.new(Dok.new(3, 3), :dok)
lilMatrix = SMatrix.new(Lil.new(3, 3), :lil)
value = 0
for i in 0..2 do
for j in 0..2 do
yaleMatrix[i, j] = value
dokMatrix[i, j] = value
lilMatrix[i, j] = value
end
end
#Ensure all equal the 3x3 zero matrix
assert(yaleMatrix.zero?)
assert(dokMatrix.zero?)
assert(lilMatrix.zero?)
#Add a 1 to each
yaleMatrix[0,0] = 1
dokMatrix[1,1] = 1
lilMatrix[2,2] = 1
assert(!yaleMatrix.zero?)
assert(!dokMatrix.zero?)
assert(!lilMatrix.zero?)
end
def test_diagonal
#setup
yaleMatrix = SMatrix.new(Yale.new(3, 3))
dokMatrix = SMatrix.new(Dok.new(3, 3), :dok)
lilMatrix = SMatrix.new(Lil.new(3, 3), :lil)
for i in 0..2 do
for j in 0..2 do
if i == j
value = i + 1
else
value = 0
end
yaleMatrix[i, j] = value
dokMatrix[i, j] = value
lilMatrix[i, j] = value
end
end
#Ensure all are diagonal
assert(yaleMatrix.diagonal?)
assert(dokMatrix.diagonal?)
assert(lilMatrix.diagonal?)
#Add an extra 1 off the diagonal
yaleMatrix[0,1] = 1
dokMatrix[1,2] = 1
lilMatrix[2,0] = 1
assert(!yaleMatrix.diagonal?)
assert(!dokMatrix.diagonal?)
assert(!lilMatrix.diagonal?)
end
def test_tridiagonal
#setup
yaleMatrix = SMatrix.new(Yale.new(4, 4))
dokMatrix = SMatrix.new(Dok.new(4, 4), :dok)
lilMatrix = SMatrix.new(Lil.new(4, 4), :lil)
for i in 0..3 do
for j in 0..3 do
if i == j or i == j - 1 or i - 1 == j
value = i + 1
else
value = 0
end
yaleMatrix[i, j] = value
dokMatrix[i, j] = value
lilMatrix[i, j] = value
end
end
#Ensure all are tridiagonal
assert(yaleMatrix.tridiagonal?)
assert(dokMatrix.tridiagonal?)
assert(lilMatrix.tridiagonal?)
#Add an extra 1 elsewhere
yaleMatrix[0,2] = 1
dokMatrix[1,3] = 1
lilMatrix[3,1] = 1
assert(!yaleMatrix.tridiagonal?)
assert(!dokMatrix.tridiagonal?)
assert(!lilMatrix.tridiagonal?)
end
def test_symmetry
#setup
yaleMatrix = SMatrix.new(Yale.new(3, 3))
dokMatrix = SMatrix.new(Dok.new(3, 3), :dok)
lilMatrix = SMatrix.new(Lil.new(3, 3), :lil)
for i in 0..2 do
for j in 0..2 do
yaleMatrix[i, j] = rand(6)
yaleMatrix[j, i] = yaleMatrix[i, j]
dokMatrix[i, j] = rand(6)
dokMatrix[j, i] = dokMatrix[i, j]
lilMatrix[i, j] = rand(6)
lilMatrix[j, i] = lilMatrix[i, j]
end
end
assert(yaleMatrix.symmetric?)
assert(dokMatrix.symmetric?)
assert(lilMatrix.symmetric?)
#Randomly change one of the elements...
row = rand(2)
column = rand(2)
until column != row do
column = rand(2)
end
yaleMatrix[row, column] = 7
dokMatrix[row, column] = 7
lilMatrix[row, column] = 7
assert(!yaleMatrix.symmetric?)
assert(!dokMatrix.symmetric?)
assert(!lilMatrix.symmetric?)
end
def test_hermitian
end
def test_normal
end
def test_orthogonal
yaleMatrix = SMatrix.new(Yale.new(2, 2))
dokMatrix = SMatrix.new(Dok.new(2, 2), :dok)
lilMatrix = SMatrix.new(Lil.new(2, 2), :lil)
for i in 0..1 do
for j in 0..1 do
yaleMatrix[i, j] = 0
dokMatrix[i, j] = 0
lilMatrix[i, j] = 0
end
end
yaleMatrix[0,0] = 1
yaleMatrix[1,1] = 1
dokMatrix[0,0] = 1
dokMatrix[1,1] = -1
lilMatrix[0,0] = 1
lilMatrix[1,1] = -1
assert(yaleMatrix.orthogonal?)
assert(dokMatrix.orthogonal?)
assert(lilMatrix.orthogonal?)
yaleMatrix[0,0] = 0
dokMatrix[1,1] = 0
lilMatrix[0,1] = 1
assert(!yaleMatrix.orthogonal?)
assert(!dokMatrix.orthogonal?)
assert(!lilMatrix.orthogonal?)
end
def test_permutation
yaleMatrix = SMatrix.new(Yale.new(3, 3))
dokMatrix = SMatrix.new(Dok.new(3, 3), :dok)
lilMatrix = SMatrix.new(Lil.new(3, 3), :lil)
valueOne = rand(3)
valueTwo = rand(3)
until valueTwo != valueOne do
valueTwo = rand(3)
end
if valueOne != 0 and valueTwo != 0
valueThree = 0
elsif valueOne != 1 and valueTwo != 1
valueThree = 1
else
valueThree = 2
end
for i in 0..2 do
for j in 0..2 do
if (i == 0 and j == valueOne) or (i == 1 and j == valueTwo) or (i == 2 and j == valueThree)
yaleMatrix[i,j] = 1
dokMatrix[i,j] = 1
lilMatrix[i,j] = 1
else
yaleMatrix[i, j] = 0
dokMatrix[i, j] = 0
lilMatrix[i, j] = 0
end
end
#assert(yaleMatrix.permutation?)
#assert(dokMatrix.permutation?)
#assert(lilMatrix.permutation?)
value = rand(2)
if yaleMatrix[0, value] == 1
yaleMatrix[1, value] = 1
else
yaleMatrix[0, value] = 1
end
value = rand(2)
if dokMatrix[0, value] == 1
dokMatrix[1, value] = 1
else
dokMatrix[0, value] = 1
end
value = rand(2)
if lilMatrix[0, value] == 1
lilMatrix[1, value] = 1
else
lilMatrix[0, value] = 1
end
assert(!yaleMatrix.permutation?)
assert(!dokMatrix.permutation?)
assert(!lilMatrix.permutation?)
end
def test_singular
yaleMatrix = SMatrix.new(Yale.new(2, 2))
dokMatrix = SMatrix.new(Dok.new(2, 2), :dok)
lilMatrix = SMatrix.new(Lil.new(2, 2), :lil)
invertedValues = [1, 0.5, 0.25, 0.125, 0.0625]
values = [1, 2, 4, 8, 16]
random = rand(5)
invertedValue = invertedValues[random]
value = values[random]
yaleMatrix[0, 0] = 1
yaleMatrix[0, 1] = invertedValue
yaleMatrix[1, 0] = value
yaleMatrix[1, 1] = 1
dokMatrix[0, 0] = 1
dokMatrix[0, 1] = invertedValue
dokMatrix[1, 0] = value
dokMatrix[1, 1] = 1
lilMatrix[0, 0] = 1
lilMatrix[0, 1] = invertedValue
lilMatrix[1, 0] = value
lilMatrix[1, 1] = 1
assert(yaleMatrix.singular?)
assert(dokMatrix.singular?)
assert(lilMatrix.singular?)
end
def test_regular
yaleMatrix = SMatrix.new(Yale.new(2, 2))
dokMatrix = SMatrix.new(Dok.new(2, 2), :dok)
lilMatrix = SMatrix.new(Lil.new(2, 2), :lil)
yaleMatrix[0, 0] = 0.5
yaleMatrix[0, 1] = 0.5
yaleMatrix[1, 0] = 0.3
yaleMatrix[1, 1] = 0.7
dokMatrix[0, 0] = 0.5
dokMatrix[0, 1] = 0.5
dokMatrix[1, 0] = 0.3
dokMatrix[1, 1] = 0.7
lilMatrix[0, 0] = 0.5
lilMatrix[0, 1] = 0.5
lilMatrix[1, 0] = 0.3
lilMatrix[1, 1] = 0.7
assert(yaleMatrix.regular?)
assert(dokMatrix.regular?)
assert(lilMatrix.regular?)
end
def test_real
yaleMatrix = SMatrix.new(Yale.new(3, 3))
dokMatrix = SMatrix.new(Dok.new(3, 3), :dok)
lilMatrix = SMatrix.new(Lil.new(3, 3), :lil)
for i in 0..2 do
for j in 0..2 do
value = rand(10)
yaleMatrix[i, j] = value
dokMatrix[i, j] = value
lilMatrix[i, j] = value
end
end
assert(yaleMatrix.real?)
assert(dokMatrix.real?)
assert(lilMatrix.real?)
yaleMatrix[rand(3), rand(3)] = Complex(rand(10), rand(10))
dokMatrix[rand(3), rand(3)] = Complex(rand(10), rand(10))
lilMatrix[rand(3), rand(3)] = Complex(rand(10), rand(10))
assert(!yaleMatrix.real?)
assert(!dokMatrix.real?)
assert(!lilMatrix.real?)
end
def test_square
yaleMatrix = SMatrix.new(Yale.new(2, 2))
dokMatrix = SMatrix.new(Dok.new(2, 2), :dok)
lilMatrix = SMatrix.new(Lil.new(2, 2), :lil)
assert(yaleMatrix.square?)
assert(dokMatrix.square?)
assert(lilMatrix.square?)
yaleMatrix2 = SMatrix.new(Yale.new(2, 3))
dokMatrix2 = SMatrix.new(Dok.new(2, 3), :dok)
lilMatrix2 = SMatrix.new(Lil.new(2, 3), :lil)
assert(!yaleMatrix2.square?)
assert(!dokMatrix2.square?)
assert(!lilMatrix2.square?)
end
# def test_unitary
#
# end
def test_upper_triangular
yaleMatrix = SMatrix.new(Yale.new(3, 3))
dokMatrix = SMatrix.new(Dok.new(3, 3), :dok)
lilMatrix = SMatrix.new(Lil.new(3, 3), :lil)
for i in 0..2 do
for j in 0..2 do
value = rand(10)
if j >= i
yaleMatrix[i, j] = value
dokMatrix[i, j] = value
lilMatrix[i, j] = value
else
yaleMatrix[i, j] = 0
dokMatrix[i, j] = 0
lilMatrix[i, j] = 0
end
end
end
assert(yaleMatrix.upper_triangular?)
assert(dokMatrix.upper_triangular?)
assert(lilMatrix.upper_triangular?)
randomRow = rand(2) + 1
randomCol = rand(2)
if randomCol == randomRow
randomCol = 0
end
yaleMatrix[randomRow, randomCol] = rand(9) + 1
dokMatrix[randomRow, randomCol] = rand(9) + 1
lilMatrix[randomRow, randomCol] = rand(9) + 1
assert(!yaleMatrix.upper_triangular?)
assert(!dokMatrix.upper_triangular?)
assert(!lilMatrix.upper_triangular?)
end
def test_lower_triangular
yaleMatrix = SMatrix.new(Yale.new(3, 3))
dokMatrix = SMatrix.new(Dok.new(3, 3), :dok)
lilMatrix = SMatrix.new(Lil.new(3, 3), :lil)
for i in 0..2 do
for j in 0..2 do
value = rand(10)
if i >= j
yaleMatrix[i, j] = value
dokMatrix[i, j] = value
lilMatrix[i, j] = value
else
yaleMatrix[i, j] = 0
dokMatrix[i, j] = 0
lilMatrix[i, j] = 0
end
end
end
assert yaleMatrix.lower_triangular?
assert dokMatrix.lower_triangular?
assert lilMatrix.lower_triangular?
randomRow = rand(2)
randomCol = rand(2) + 1
if randomCol == randomRow
randomRow = 0
end
yaleMatrix[randomRow, randomCol] = rand(9) + 1
dokMatrix[randomRow, randomCol] = rand(9) + 1
lilMatrix[randomRow, randomCol] = rand(9) + 1
assert !yaleMatrix.lower_triangular?
assert !dokMatrix.lower_triangular?
assert !lilMatrix.lower_triangular?
end
end
| 20.076779 | 95 | 0.642384 |
38364a0b151289a753e1a7a3d610f228fc91f4c4 | 1,246 | module Gitlab
class SnippetSearchResults < SearchResults
include SnippetsHelper
attr_reader :limit_snippet_ids
def initialize(limit_snippet_ids, query)
@limit_snippet_ids = limit_snippet_ids
@query = query
end
def objects(scope, page = nil)
case scope
when 'snippet_titles'
Kaminari.paginate_array(snippet_titles).page(page).per(per_page)
when 'snippet_blobs'
Kaminari.paginate_array(snippet_blobs).page(page).per(per_page)
else
super
end
end
def total_count
@total_count ||= snippet_titles_count + snippet_blobs_count
end
def snippet_titles_count
@snippet_titles_count ||= snippet_titles.count
end
def snippet_blobs_count
@snippet_blobs_count ||= snippet_blobs.count
end
private
def snippet_titles
Snippet.where(id: limit_snippet_ids).search(query).order('updated_at DESC')
end
def snippet_blobs
search = Snippet.where(id: limit_snippet_ids).search_code(query)
search = search.order('updated_at DESC').to_a
snippets = []
search.each { |e| snippets << chunk_snippet(e) }
snippets
end
def default_scope
'snippet_blobs'
end
end
end
| 23.074074 | 81 | 0.679775 |
e82c74b3875948d5e573e9fdef54acadeb5b3df3 | 1,545 | require_relative "../canvas_base_mutation"
require_relative "../../types/canvas/group_category"
module LMSGraphQL
module Mutations
module Canvas
class CreateGroupCategoryAccount < BaseMutation
argument :account_id, ID, required: true
argument :name, String, required: true
argument :self_signup, String, required: false
argument :auto_leader, String, required: false
argument :group_limit, Int, required: false
argument :sis_group_category_id, ID, required: false
argument :create_group_count, Int, required: false
argument :split_group_count, String, required: false
field :group_category, LMSGraphQL::Types::Canvas::CanvasGroupCategory, null: false
def resolve(account_id:, name:, self_signup: nil, auto_leader: nil, group_limit: nil, sis_group_category_id: nil, create_group_count: nil, split_group_count: nil)
context[:canvas_api].call("CREATE_GROUP_CATEGORY_ACCOUNTS").proxy(
"CREATE_GROUP_CATEGORY_ACCOUNTS",
{
"account_id": account_id
},
{
"name": name,
"self_signup": self_signup,
"auto_leader": auto_leader,
"group_limit": group_limit,
"sis_group_category_id": sis_group_category_id,
"create_group_count": create_group_count,
"split_group_count": split_group_count
},
).parsed_response
end
end
end
end
end | 39.615385 | 170 | 0.638835 |
1c1206c1cbb0000c4a0caa66b5325b0e2a7fed25 | 5,002 | Icy roads in places, but mostly passable.
In several places in the Czech Republic, the main roads are icy and snow-covered.
However, most roads are passable, with extra care needed in places.
Carlsbad region
In the region, roads were passable this morning, although in some places they were icy and snow-covered.
Temperatures dropped to 5-10 degrees below zero, although they are expected to warm up slightly during the day.
Snowfall in the region has stopped, and only a thin layer of snow remains in the lowlands.
However, the ridges of the Ore Mountains already have about 30 centimetres of snow.
In some places visibility is limited by fog, according to the local road services.
The R6 expressway and the main roads in the region are now passable without restrictions.
Caution is, of course, advisable, for example on some bridges, where the surface may be icy and slippery.
All secondary and tertiary roads are also passable, including mountain roads.
Some stretches of these roads may be frozen, with leftover snow in places after the snowfall.
Above all at higher elevations, particular care should be taken while driving.
Pardubice and Hradec Králové regions
On some roads in East Bohemia there is a risk of black ice; at higher elevations and in the mountains there may be a layer of compacted snow, according to the road and motorway directorate.
The highway service warns drivers about black ice, which may occur at higher elevations, in the Pardubice region in particular.
Black ice may occur around Lanškroun, Ústí nad Orlicí, Polička, Svitavy and Vysoké Mýto, and especially on secondary and tertiary roads.
The I/43 and I/34 roads have been chemically treated around Svitavy.
Snow is mainly affecting roads in the Krkonoše and Orlické Mountains.
At higher elevations, there is a compacted layer of snow on the roads around Rychnov nad Kněžnou and Trutnov.
In East Bohemia the day will be mostly clear to partly cloudy and dry.
Air temperatures will range from minus three to plus one degree Celsius, mostly with a light wind.
Пльзень региона
Дороги в Пльзене региона были использованы сегодня утром, с дополнительной помощи нуждались в некоторых местах. Водители должны принять во внимание погодные условия.
Утром будет морозным, с температурой, начиная от трех до девяти градусов ниже нуля.
Из-за существующих снег и последующее падение температуры, некоторые дороги может быть скользким.
Водители должны ожидать, местами туман, видимость будут постепенно улучшаться.
Эту информацию сообщили в регионе шоссе.
Автострады D5 - драйвабель почти без ограничений, но дорожные службы предоставит дополнительные осторожности между 80 и 131st километре знаки.
Большинство основной дорожной поверхности - сухой и мороза.
Южные районы Пльзень и tachov регионы могут иметь ледяной пластырей.
Вторичной и третичной мокрой дороги, и, возможно, поэтому также ледяной пластырей.
Водителям следует быть осторожными, особенно на менее посещаемых дорог в чешском леса.
Оломоуц региона
Водителям следует ожидать шугу снег на дорогах, если двигаться по большей части Оломоуца региона.
Это результат химической обработки проведены на червеногорксе седловины и на пути к видельский криз.
Snowploughs были привезены из падающего снега в одночасье, сумперк региона, по шоссе, получил около трех сантиметров снега.
В других частях региона, дороги в основном проходимых без ограничений.
"в регионе сумперк, следы на снегу остались на самом высоком высотах.
Водителям следует ожидать в шугу снег червеногорске седловины вынесенный в направлении ", - говорится в сообщении на шоссе сумперк офицер службы рассказал CTK сегодня.
Вынесенный их коллеги также отправятся в одночасье; дороги весь путь к высшей высоте становится ясным и влажной после химической обработке, согласно их.
В Оломоуце дорог региона работают без ограничений, в то время как в области чешский штернберк водители должны остерегаться в лесистой местности, где дороги оставались влажными.
Usti nad Labem региона, Либерец регион
С этим утром, сноуплугов сообщили несколько мест, которые трудно пройти в северной Богемии.
Кроме того, некоторые заснеженных местах, или некоторые патчи пятнадцатиградусный мороз, горной дороге из тельнице к книнице в Usti nad Labem региона также закрыты, согласно базе данных полиции.
Температура воздуха останется ниже нуля и дорог, скорее всего, останется снежные и ледяные. В низинах, однако, особенно в юго-восточной части Центральной богемный нагорье, нет никаких проблем и дороги в основном сухой.
Никакого движения перебои пока не сообщается.
Ледяной холод патчи были представлены, в частности, по содержанию дорог штети.
По данным метеорологов условия для этого были идеальным - дожди и таяние снега в течение дня, с четкой и ночные заморозки.
Неблагоприятные погодные условия ожидаются на основных разделов I / 13 дороги между Usti nad Labem и Либерец регионов.
Закрытие тельнице к книнице дорога была вызвана изогнутых веток деревьев, которые были взвешены до уровня дорожного движения, снегопад.
| 94.377358 | 218 | 0.828669 |
bb84635b45ccdc87448d3f7eb41231444f52de2a | 148 | # Be sure to restart your server when you modify this file.
Rails.application.config.session_store :cookie_store, key: '_blog-api-example_session'
| 37 | 86 | 0.804054 |
332a9d532d9831d5fb2f7e2a400c25faebd96965 | 3,619 | require 'rails_helper'
include DmKnowledge::SkmlHelper
describe DmKnowledge::SkmlHelper do
#------------------------------------------------------------------------------
describe "skml_to_html" do
let(:skml) do
<<-SKML
<!--: data-who="someone" data-srcid="1.x" -->
Some text to convert
<!--: data-who="someone" data-srcid="1.x" -->
<foreign>basha</foreign> is telugu
<!--: data-who="someone" data-srcid="1.x" -->
<mantra>
some mantra text
</mantra>
<!--: data-who="someone" data-srcid="1.x" -->
This is a <name>Name</name>
<!--: data-who="someone" data-srcid="1.x" -->
And <verse data-srcid="1.x.y">this is a verse</verse>
<!--: data-who="someone" data-srcid="1.x" -->
And this is <redact>redacted</redact>
<!--: data-who="someone" data-srcid="1.x" -->
<golden>A golden statment</golden>
And **some** regular _Markdown_
SKML
end
let(:skml_converted) do
<<-SKML
<p data-who="someone" data-srcid="1.x">Some text to convert</p>
<p data-who="someone" data-srcid="1.x"><span data-foreign="">basha</span> is telugu</p>
<p data-who="someone" data-srcid="1.x"><span data-mantra="">
some mantra text
</span></p>
<p data-who="someone" data-srcid="1.x">This is a <span data-name="">Name</span></p>
<p data-who="someone" data-srcid="1.x">And <span data-verse="" data-srcid="1.x.y">this is a verse</span></p>
<p data-who="someone" data-srcid="1.x">And this is <span data-redact=""> </span></p>
<p data-who="someone" data-srcid="1.x"><q data-golden="">A golden statment</q></p>
<p>And <strong>some</strong> regular <em>Markdown</em></p>
SKML
end
it "converts test skml into html" do
expect(skml_to_html(skml)).to eq skml_converted
end
it "changes text in redact tag" do
redact = "Some <redact>redacted</redact> text"
redacted = 'Some <span data-redact=""> </span> text'
redacted2 = 'Some <span data-redact="">********</span> text'
expect(skml_redact(redact)).to eq redacted
expect(skml_redact(redact, '*')).to eq redacted2
end
end
#------------------------------------------------------------------------------
describe "add_tags_to_verses" do
let(:skml_pre_tagged) do
<<-SKML
<p data-srcid="1.12">one</p>
<p data-srcid="1.20">two</p>
<p data-srcid="1.21">three</p>
SKML
end
let(:skml_tagged) do
<<-SKML
<p data-srcid="1.12" data-tags='["foobar","shoe"]' data-tagid='s_1_12'>one</p>
<p data-srcid="1.20" data-tags='["one","two"]' data-tagid='s_1_20'>two</p>
<p data-srcid="1.21">three</p>
SKML
end
it "looksup and adds tags to html" do
document = create(:document, content: skml_pre_tagged)
document.set_tag_list_on(DmKnowledge::Document.tagcontext_from_srcid('1.12'), "foobar, shoe")
document.save
document.set_tag_list_on(DmKnowledge::Document.tagcontext_from_srcid('1.20'), "one, two")
document.save
tagged_text = add_tags_to_verses(document, document.content)
expect(tagged_text).to eq skml_tagged
end
end
#------------------------------------------------------------------------------
describe "skml_set_srcids" do
let(:skml_non_numbered) do
<<-SKML
<p data-srcid="1.x">one</p>
<p data-srcid = "1.x">two</p>
<p data-srcid="2.x">three<span data-srcid="2.x.y">four</span></p>
SKML
end
let(:skml_numbered) do
<<-SKML
<p data-srcid="1.1">one</p>
<p data-srcid="1.2">two</p>
<p data-srcid="2.1">three<span data-srcid="2.x.y">four</span></p>
SKML
end
it "auto-numbers the srcids" do
expect(skml_set_srcids(skml_non_numbered)).to eq skml_numbered
end
end
end | 28.722222 | 108 | 0.604034 |
01de010eda1108a2b26dfd352afee69ecaaedd32 | 2,117 | class NewsReleasesController < ApplicationController
before_action :authenticate, except: [:index, :show]
before_action :set_news_release, only: [:show, :edit, :update, :destroy]
# GET /news_releases
# GET /news_releases.json
def index
@news_releases = NewsRelease.all
end
# GET /news_releases/1
# GET /news_releases/1.json
def show
end
# GET /news_releases/new
def new
@news_release = NewsRelease.new
end
# GET /news_releases/1/edit
def edit
end
# POST /news_releases
# POST /news_releases.json
def create
@news_release = NewsRelease.new(news_release_params)
respond_to do |format|
if @news_release.save
format.html { redirect_to news_releases_url,
notice: 'Successfully created news release.' }
format.json { render action: 'show', status: :created, location: @news_release }
else
format.html { render action: 'new' }
format.json { render json: @news_release.errors, status: :unprocessable_entity }
end
end
end
# PATCH/PUT /news_releases/1
# PATCH/PUT /news_releases/1.json
def update
respond_to do |format|
if @news_release.update(news_release_params)
format.html { redirect_to @news_release, notice: 'News release was successfully updated.' }
format.json { head :no_content }
else
format.html { render action: 'edit' }
format.json { render json: @news_release.errors, status: :unprocessable_entity }
end
end
end
# DELETE /news_releases/1
# DELETE /news_releases/1.json
def destroy
@news_release.destroy
respond_to do |format|
format.html { redirect_to news_releases_url }
format.json { head :no_content }
end
end
private
# Use callbacks to share common setup or constraints between actions.
def set_news_release
@news_release = NewsRelease.find(params[:id])
end
# Never trust parameters from the scary internet, only allow the white list through.
def news_release_params
params.require(:news_release).permit(:title, :released_on, :body)
end
end
| 27.493506 | 99 | 0.685876 |
5df0561276657c6cc9aade16744469c5dc2fe6d7 | 2,684 | #
# Author:: Adam Jacob (<[email protected]>)
# Author:: Seth Falcon (<[email protected]>)
# Copyright:: Copyright 2009-2016, Chef Software, Inc.
# License:: Apache License, Version 2.0
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
require "chef/knife"
require "chef/knife/data_bag_secret_options"
class Chef
class Knife
class DataBagShow < Knife
include DataBagSecretOptions
deps do
require "chef/data_bag"
require "chef/encrypted_data_bag_item"
end
banner "knife data bag show BAG [ITEM] (options)"
category "data bag"
def run
display = case @name_args.length
when 2 # Bag and Item names provided
secret = encryption_secret_provided_ignore_encrypt_flag? ? read_secret : nil
raw_data = Chef::DataBagItem.load(@name_args[0], @name_args[1]).raw_data
encrypted = encrypted?(raw_data)
if encrypted && secret
# Users do not need to pass --encrypt to read data, we simply try to use the provided secret
ui.info("Encrypted data bag detected, decrypting with provided secret.")
raw = Chef::EncryptedDataBagItem.load(@name_args[0],
@name_args[1],
secret)
format_for_display(raw.to_hash)
elsif encrypted && !secret
ui.warn("Encrypted data bag detected, but no secret provided for decoding. Displaying encrypted data.")
format_for_display(raw_data)
else
ui.warn("Unencrypted data bag detected, ignoring any provided secret options.") if secret
format_for_display(raw_data)
end
when 1 # Only Bag name provided
format_list_for_display(Chef::DataBag.load(@name_args[0]))
else
stdout.puts opt_parser
exit(1)
end
output(display)
end
end
end
end
| 38.342857 | 125 | 0.585693 |
18008c191280180e313e727c3fdef29c2468ae51 | 292 | # frozen_string_literal: true
module ExceptionTrack
class Engine < ::Rails::Engine
isolate_namespace ExceptionTrack
initializer "exception-track.assets.precompile", group: :all do |app|
app.config.assets.precompile += %w( exception-track/application.css )
end
end
end
| 24.333333 | 75 | 0.736301 |
1d193d6e7cdee4a05762aa437d2f6e486ae7dcf6 | 931 | # encoding: utf-8
class Admin::RewardsController < Admin::BaseController
before_filter :find_parent_user, :except => [:index]
def index
@rewards = Reward.order('created_at DESC').page(params[:page])
@title = '奖励记录'
end
def new
respond_to do |f|
f.js {
@reward_type = params[:reward_type].present? ? params[:reward_type] : Reward::TYPE_GRANT
@reward = @user.rewards.build(:reward_type => @reward_type, :amount_str => '0')
}
end
end
def create
respond_to do |f|
f.js {
@reward = @user.rewards.build(params[:reward])
@reward_type = @reward.reward_type
@reward.admin_user = current_user
result = Reward.transaction do
@reward.save && @user.update_attributes({:reward => @user.reward + @reward.amount}, :as => current_user.permission_role)
end
render :new and return unless result
}
end
end
end
| 26.6 | 130 | 0.628357 |
1de4272ed01df32eea16c47c37422464c343186e | 123 | module Sofa
module Version
MAJOR = 0
MINOR = 1
PATCH = 4
STRING = "#{MAJOR}.#{MINOR}.#{PATCH}"
end
end
| 13.666667 | 41 | 0.552846 |
0170171cd9505312ca9d39201fbfd82d6241d79d | 271 | module Typhoeus
class Response
module Cacheable
# Set the cache status, if we got response from cache
# it will have cached? == true
attr_writer :cached
def cached?
defined?(@cached) ? !!@cached : false
end
end
end
end
| 18.066667 | 59 | 0.608856 |
337cb30fbabc24577e507da8827eda1ac3c54195 | 844 | #
# Copyright (c) 2015 Nordstrom, Inc.
#
require 'spec_helper'
software = OHAI['software']
describe 'software plugin' do
  it 'should add software to the plugin directory' do
    expect(file '/etc/chef/ohai_plugins/software.rb').to be_file
  end
  it 'vas should not be installed' do
    expect(software['vas']['installed']).to eql(false)
  end
  it 'vas version should be nil' do
    expect(software['vas']['version']).to eql(nil)
  end
  it 'vmware should not be installed' do
    expect(software['vmware']['installed']).to eql(false)
  end
  it 'vmware version should be nil' do
    expect(software['vmware']['version']).to eql(nil)
  end
  it 'vxfs should not be installed' do
    expect(software['vxfs']['installed']).to eql(false)
  end
  it 'vxfs version should be nil' do
    expect(software['vxfs']['version']).to eql(nil)
  end
end
| 22.210526 | 64 | 0.683649 |
ac622049c124ba55bc61f4d94a9c4040229f4318 | 1,122 | class ArchiveTransactionsService
attr_reader :user, :period_started_at, :period_ended_at
def initialize(user, archive_date)
@user = user
@period_started_at = archive_date.beginning_of_month
@period_ended_at = archive_date.end_of_month
end
def archive
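    # Rebuild the month's aggregates from scratch: clear existing rows, then sum transaction amounts per category, category type and currency.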
AggregatedTransaction.transaction do
delete_aggregations_for_current_period
user_transactions_for_period.find_each do |t|
at = user.aggregated_transactions.where(
period_started_at: period_started_at,
period_ended_at: period_ended_at,
category_id: t.category_id,
category_type_id: t.category_type_id,
currency_id: t.account_currency_id
).first_or_initialize
at.amount = at.amount.to_f + t.summ
at.save
end
end
end
private
def user_transactions_for_period
user.transactions.where(created_at: period_started_at..period_ended_at)
end
def delete_aggregations_for_current_period
user.aggregated_transactions.where(
period_started_at: period_started_at,
period_ended_at: period_ended_at
).delete_all
end
end
| 28.05 | 75 | 0.737077 |
79bd643142ec8c4f57000fe238e2b5bd56abe734 | 27,799 | # Copyright 2015 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
require 'date'
require 'google/apis/core/base_service'
require 'google/apis/core/json_representation'
require 'google/apis/core/hashable'
require 'google/apis/errors'
module Google
module Apis
module TpuV1alpha1
      # An accelerator type that a Node can be configured with.
class AcceleratorType
include Google::Apis::Core::Hashable
# The resource name.
# Corresponds to the JSON property `name`
# @return [String]
attr_accessor :name
# the accelerator type.
# Corresponds to the JSON property `type`
# @return [String]
attr_accessor :type
def initialize(**args)
update!(**args)
end
# Update properties of this object
def update!(**args)
@name = args[:name] if args.key?(:name)
@type = args[:type] if args.key?(:type)
end
end
# A generic empty message that you can re-use to avoid defining duplicated empty
# messages in your APIs. A typical example is to use it as the request or the
# response type of an API method. For instance: service Foo ` rpc Bar(google.
# protobuf.Empty) returns (google.protobuf.Empty); ` The JSON representation for
# `Empty` is empty JSON object ````.
class Empty
include Google::Apis::Core::Hashable
def initialize(**args)
update!(**args)
end
# Update properties of this object
def update!(**args)
end
end
# Response for ListAcceleratorTypes.
class ListAcceleratorTypesResponse
include Google::Apis::Core::Hashable
# The listed nodes.
# Corresponds to the JSON property `acceleratorTypes`
# @return [Array<Google::Apis::TpuV1alpha1::AcceleratorType>]
attr_accessor :accelerator_types
# The next page token or empty if none.
# Corresponds to the JSON property `nextPageToken`
# @return [String]
attr_accessor :next_page_token
# Locations that could not be reached.
# Corresponds to the JSON property `unreachable`
# @return [Array<String>]
attr_accessor :unreachable
def initialize(**args)
update!(**args)
end
# Update properties of this object
def update!(**args)
@accelerator_types = args[:accelerator_types] if args.key?(:accelerator_types)
@next_page_token = args[:next_page_token] if args.key?(:next_page_token)
@unreachable = args[:unreachable] if args.key?(:unreachable)
end
end
# The response message for Locations.ListLocations.
class ListLocationsResponse
include Google::Apis::Core::Hashable
# A list of locations that matches the specified filter in the request.
# Corresponds to the JSON property `locations`
# @return [Array<Google::Apis::TpuV1alpha1::Location>]
attr_accessor :locations
# The standard List next-page token.
# Corresponds to the JSON property `nextPageToken`
# @return [String]
attr_accessor :next_page_token
def initialize(**args)
update!(**args)
end
# Update properties of this object
def update!(**args)
@locations = args[:locations] if args.key?(:locations)
@next_page_token = args[:next_page_token] if args.key?(:next_page_token)
end
end
# Response for ListNodes.
class ListNodesResponse
include Google::Apis::Core::Hashable
# The next page token or empty if none.
# Corresponds to the JSON property `nextPageToken`
# @return [String]
attr_accessor :next_page_token
# The listed nodes.
# Corresponds to the JSON property `nodes`
# @return [Array<Google::Apis::TpuV1alpha1::Node>]
attr_accessor :nodes
# Locations that could not be reached.
# Corresponds to the JSON property `unreachable`
# @return [Array<String>]
attr_accessor :unreachable
def initialize(**args)
update!(**args)
end
# Update properties of this object
def update!(**args)
@next_page_token = args[:next_page_token] if args.key?(:next_page_token)
@nodes = args[:nodes] if args.key?(:nodes)
@unreachable = args[:unreachable] if args.key?(:unreachable)
end
end
# The response message for Operations.ListOperations.
class ListOperationsResponse
include Google::Apis::Core::Hashable
# The standard List next-page token.
# Corresponds to the JSON property `nextPageToken`
# @return [String]
attr_accessor :next_page_token
# A list of operations that matches the specified filter in the request.
# Corresponds to the JSON property `operations`
# @return [Array<Google::Apis::TpuV1alpha1::Operation>]
attr_accessor :operations
def initialize(**args)
update!(**args)
end
# Update properties of this object
def update!(**args)
@next_page_token = args[:next_page_token] if args.key?(:next_page_token)
@operations = args[:operations] if args.key?(:operations)
end
end
# Response for ListTensorFlowVersions.
class ListTensorFlowVersionsResponse
include Google::Apis::Core::Hashable
# The next page token or empty if none.
# Corresponds to the JSON property `nextPageToken`
# @return [String]
attr_accessor :next_page_token
# The listed nodes.
# Corresponds to the JSON property `tensorflowVersions`
# @return [Array<Google::Apis::TpuV1alpha1::TensorFlowVersion>]
attr_accessor :tensorflow_versions
# Locations that could not be reached.
# Corresponds to the JSON property `unreachable`
# @return [Array<String>]
attr_accessor :unreachable
def initialize(**args)
update!(**args)
end
# Update properties of this object
def update!(**args)
@next_page_token = args[:next_page_token] if args.key?(:next_page_token)
@tensorflow_versions = args[:tensorflow_versions] if args.key?(:tensorflow_versions)
@unreachable = args[:unreachable] if args.key?(:unreachable)
end
end
# A resource that represents Google Cloud Platform location.
class Location
include Google::Apis::Core::Hashable
# The friendly name for this location, typically a nearby city name. For example,
# "Tokyo".
# Corresponds to the JSON property `displayName`
# @return [String]
attr_accessor :display_name
# Cross-service attributes for the location. For example `"cloud.googleapis.com/
# region": "us-east1"`
# Corresponds to the JSON property `labels`
# @return [Hash<String,String>]
attr_accessor :labels
# The canonical id for this location. For example: `"us-east1"`.
# Corresponds to the JSON property `locationId`
# @return [String]
attr_accessor :location_id
# Service-specific metadata. For example the available capacity at the given
# location.
# Corresponds to the JSON property `metadata`
# @return [Hash<String,Object>]
attr_accessor :metadata
# Resource name for the location, which may vary between implementations. For
# example: `"projects/example-project/locations/us-east1"`
# Corresponds to the JSON property `name`
# @return [String]
attr_accessor :name
def initialize(**args)
update!(**args)
end
# Update properties of this object
def update!(**args)
@display_name = args[:display_name] if args.key?(:display_name)
@labels = args[:labels] if args.key?(:labels)
@location_id = args[:location_id] if args.key?(:location_id)
@metadata = args[:metadata] if args.key?(:metadata)
@name = args[:name] if args.key?(:name)
end
end
# A network endpoint over which a TPU worker can be reached.
class NetworkEndpoint
include Google::Apis::Core::Hashable
# The IP address of this network endpoint.
# Corresponds to the JSON property `ipAddress`
# @return [String]
attr_accessor :ip_address
# The port of this network endpoint.
# Corresponds to the JSON property `port`
# @return [Fixnum]
attr_accessor :port
def initialize(**args)
update!(**args)
end
# Update properties of this object
def update!(**args)
@ip_address = args[:ip_address] if args.key?(:ip_address)
@port = args[:port] if args.key?(:port)
end
end
# A TPU instance.
class Node
include Google::Apis::Core::Hashable
# Required. The type of hardware accelerators associated with this node.
# Corresponds to the JSON property `acceleratorType`
# @return [String]
attr_accessor :accelerator_type
# The CIDR block that the TPU node will use when selecting an IP address. This
# CIDR block must be a /29 block; the Compute Engine networks API forbids a
# smaller block, and using a larger block would be wasteful (a node can only
# consume one IP address). Errors will occur if the CIDR block has already been
# used for a currently existing TPU node, the CIDR block conflicts with any
# subnetworks in the user's provided network, or the provided network is peered
# with another network that is using that CIDR block.
# Corresponds to the JSON property `cidrBlock`
# @return [String]
attr_accessor :cidr_block
# Output only. The time when the node was created.
# Corresponds to the JSON property `createTime`
# @return [String]
attr_accessor :create_time
# The user-supplied description of the TPU. Maximum of 512 characters.
# Corresponds to the JSON property `description`
# @return [String]
attr_accessor :description
# The health status of the TPU node.
# Corresponds to the JSON property `health`
# @return [String]
attr_accessor :health
# Output only. If this field is populated, it contains a description of why the
# TPU Node is unhealthy.
# Corresponds to the JSON property `healthDescription`
# @return [String]
attr_accessor :health_description
# Output only. DEPRECATED! Use network_endpoints instead. The network address
# for the TPU Node as visible to Compute Engine instances.
# Corresponds to the JSON property `ipAddress`
# @return [String]
attr_accessor :ip_address
# Resource labels to represent user-provided metadata.
# Corresponds to the JSON property `labels`
# @return [Hash<String,String>]
attr_accessor :labels
# Output only. Immutable. The name of the TPU
# Corresponds to the JSON property `name`
# @return [String]
attr_accessor :name
        # The name of the network you wish to peer the TPU node to. It must be a
# preexisting Compute Engine network inside of the project on which this API has
# been activated. If none is provided, "default" will be used.
# Corresponds to the JSON property `network`
# @return [String]
attr_accessor :network
# Output only. The network endpoints where TPU workers can be accessed and sent
# work. It is recommended that Tensorflow clients of the node reach out to the
# 0th entry in this map first.
# Corresponds to the JSON property `networkEndpoints`
# @return [Array<Google::Apis::TpuV1alpha1::NetworkEndpoint>]
attr_accessor :network_endpoints
# Output only. DEPRECATED! Use network_endpoints instead. The network port for
# the TPU Node as visible to Compute Engine instances.
# Corresponds to the JSON property `port`
# @return [String]
attr_accessor :port
# Sets the scheduling options for this node.
# Corresponds to the JSON property `schedulingConfig`
# @return [Google::Apis::TpuV1alpha1::SchedulingConfig]
attr_accessor :scheduling_config
# Output only. The service account used to run the tensor flow services within
# the node. To share resources, including Google Cloud Storage data, with the
# Tensorflow job running in the Node, this account must have permissions to that
# data.
# Corresponds to the JSON property `serviceAccount`
# @return [String]
attr_accessor :service_account
# Output only. The current state for the TPU Node.
# Corresponds to the JSON property `state`
# @return [String]
attr_accessor :state
# Output only. The Symptoms that have occurred to the TPU Node.
# Corresponds to the JSON property `symptoms`
# @return [Array<Google::Apis::TpuV1alpha1::Symptom>]
attr_accessor :symptoms
# Required. The version of Tensorflow running in the Node.
# Corresponds to the JSON property `tensorflowVersion`
# @return [String]
attr_accessor :tensorflow_version
        # Whether the VPC peering for the node is set up through the Service Networking API.
        # The VPC peering should be set up before provisioning the node. If this field is
        # set, the cidr_block field should not be specified. If the network you want to
        # peer the TPU Node to is a Shared VPC network, the node must be created with this
        # field enabled.
# Corresponds to the JSON property `useServiceNetworking`
# @return [Boolean]
attr_accessor :use_service_networking
alias_method :use_service_networking?, :use_service_networking
def initialize(**args)
update!(**args)
end
# Update properties of this object
def update!(**args)
@accelerator_type = args[:accelerator_type] if args.key?(:accelerator_type)
@cidr_block = args[:cidr_block] if args.key?(:cidr_block)
@create_time = args[:create_time] if args.key?(:create_time)
@description = args[:description] if args.key?(:description)
@health = args[:health] if args.key?(:health)
@health_description = args[:health_description] if args.key?(:health_description)
@ip_address = args[:ip_address] if args.key?(:ip_address)
@labels = args[:labels] if args.key?(:labels)
@name = args[:name] if args.key?(:name)
@network = args[:network] if args.key?(:network)
@network_endpoints = args[:network_endpoints] if args.key?(:network_endpoints)
@port = args[:port] if args.key?(:port)
@scheduling_config = args[:scheduling_config] if args.key?(:scheduling_config)
@service_account = args[:service_account] if args.key?(:service_account)
@state = args[:state] if args.key?(:state)
@symptoms = args[:symptoms] if args.key?(:symptoms)
@tensorflow_version = args[:tensorflow_version] if args.key?(:tensorflow_version)
@use_service_networking = args[:use_service_networking] if args.key?(:use_service_networking)
end
end
# This resource represents a long-running operation that is the result of a
# network API call.
class Operation
include Google::Apis::Core::Hashable
# If the value is `false`, it means the operation is still in progress. If `true`
# , the operation is completed, and either `error` or `response` is available.
# Corresponds to the JSON property `done`
# @return [Boolean]
attr_accessor :done
alias_method :done?, :done
# The `Status` type defines a logical error model that is suitable for different
# programming environments, including REST APIs and RPC APIs. It is used by [
# gRPC](https://github.com/grpc). Each `Status` message contains three pieces of
# data: error code, error message, and error details. You can find out more
# about this error model and how to work with it in the [API Design Guide](https:
# //cloud.google.com/apis/design/errors).
# Corresponds to the JSON property `error`
# @return [Google::Apis::TpuV1alpha1::Status]
attr_accessor :error
# Service-specific metadata associated with the operation. It typically contains
# progress information and common metadata such as create time. Some services
# might not provide such metadata. Any method that returns a long-running
# operation should document the metadata type, if any.
# Corresponds to the JSON property `metadata`
# @return [Hash<String,Object>]
attr_accessor :metadata
# The server-assigned name, which is only unique within the same service that
# originally returns it. If you use the default HTTP mapping, the `name` should
# be a resource name ending with `operations/`unique_id``.
# Corresponds to the JSON property `name`
# @return [String]
attr_accessor :name
# The normal response of the operation in case of success. If the original
# method returns no data on success, such as `Delete`, the response is `google.
# protobuf.Empty`. If the original method is standard `Get`/`Create`/`Update`,
# the response should be the resource. For other methods, the response should
# have the type `XxxResponse`, where `Xxx` is the original method name. For
# example, if the original method name is `TakeSnapshot()`, the inferred
# response type is `TakeSnapshotResponse`.
# Corresponds to the JSON property `response`
# @return [Hash<String,Object>]
attr_accessor :response
def initialize(**args)
update!(**args)
end
# Update properties of this object
def update!(**args)
@done = args[:done] if args.key?(:done)
@error = args[:error] if args.key?(:error)
@metadata = args[:metadata] if args.key?(:metadata)
@name = args[:name] if args.key?(:name)
@response = args[:response] if args.key?(:response)
end
end
# Represents the metadata of the long-running operation.
class OperationMetadata
include Google::Apis::Core::Hashable
# [Output only] API version used to start the operation.
# Corresponds to the JSON property `apiVersion`
# @return [String]
attr_accessor :api_version
# [Output only] Identifies whether the user has requested cancellation of the
# operation. Operations that have successfully been cancelled have Operation.
# error value with a google.rpc.Status.code of 1, corresponding to `Code.
# CANCELLED`.
# Corresponds to the JSON property `cancelRequested`
# @return [Boolean]
attr_accessor :cancel_requested
alias_method :cancel_requested?, :cancel_requested
# [Output only] The time the operation was created.
# Corresponds to the JSON property `createTime`
# @return [String]
attr_accessor :create_time
# [Output only] The time the operation finished running.
# Corresponds to the JSON property `endTime`
# @return [String]
attr_accessor :end_time
# [Output only] Human-readable status of the operation, if any.
# Corresponds to the JSON property `statusDetail`
# @return [String]
attr_accessor :status_detail
# [Output only] Server-defined resource path for the target of the operation.
# Corresponds to the JSON property `target`
# @return [String]
attr_accessor :target
# [Output only] Name of the verb executed by the operation.
# Corresponds to the JSON property `verb`
# @return [String]
attr_accessor :verb
def initialize(**args)
update!(**args)
end
# Update properties of this object
def update!(**args)
@api_version = args[:api_version] if args.key?(:api_version)
@cancel_requested = args[:cancel_requested] if args.key?(:cancel_requested)
@create_time = args[:create_time] if args.key?(:create_time)
@end_time = args[:end_time] if args.key?(:end_time)
@status_detail = args[:status_detail] if args.key?(:status_detail)
@target = args[:target] if args.key?(:target)
@verb = args[:verb] if args.key?(:verb)
end
end
# Request for ReimageNode.
class ReimageNodeRequest
include Google::Apis::Core::Hashable
# The version for reimage to create.
# Corresponds to the JSON property `tensorflowVersion`
# @return [String]
attr_accessor :tensorflow_version
def initialize(**args)
update!(**args)
end
# Update properties of this object
def update!(**args)
@tensorflow_version = args[:tensorflow_version] if args.key?(:tensorflow_version)
end
end
# Sets the scheduling options for this node.
class SchedulingConfig
include Google::Apis::Core::Hashable
# Defines whether the node is preemptible.
# Corresponds to the JSON property `preemptible`
# @return [Boolean]
attr_accessor :preemptible
alias_method :preemptible?, :preemptible
# Whether the node is created under a reservation.
# Corresponds to the JSON property `reserved`
# @return [Boolean]
attr_accessor :reserved
alias_method :reserved?, :reserved
def initialize(**args)
update!(**args)
end
# Update properties of this object
def update!(**args)
@preemptible = args[:preemptible] if args.key?(:preemptible)
@reserved = args[:reserved] if args.key?(:reserved)
end
end
# Request for StartNode.
class StartNodeRequest
include Google::Apis::Core::Hashable
def initialize(**args)
update!(**args)
end
# Update properties of this object
def update!(**args)
end
end
# The `Status` type defines a logical error model that is suitable for different
# programming environments, including REST APIs and RPC APIs. It is used by [
# gRPC](https://github.com/grpc). Each `Status` message contains three pieces of
# data: error code, error message, and error details. You can find out more
# about this error model and how to work with it in the [API Design Guide](https:
# //cloud.google.com/apis/design/errors).
class Status
include Google::Apis::Core::Hashable
# The status code, which should be an enum value of google.rpc.Code.
# Corresponds to the JSON property `code`
# @return [Fixnum]
attr_accessor :code
# A list of messages that carry the error details. There is a common set of
# message types for APIs to use.
# Corresponds to the JSON property `details`
# @return [Array<Hash<String,Object>>]
attr_accessor :details
# A developer-facing error message, which should be in English. Any user-facing
# error message should be localized and sent in the google.rpc.Status.details
# field, or localized by the client.
# Corresponds to the JSON property `message`
# @return [String]
attr_accessor :message
def initialize(**args)
update!(**args)
end
# Update properties of this object
def update!(**args)
@code = args[:code] if args.key?(:code)
@details = args[:details] if args.key?(:details)
@message = args[:message] if args.key?(:message)
end
end
# Request for StopNode.
class StopNodeRequest
include Google::Apis::Core::Hashable
def initialize(**args)
update!(**args)
end
# Update properties of this object
def update!(**args)
end
end
# A Symptom instance.
class Symptom
include Google::Apis::Core::Hashable
# Timestamp when the Symptom is created.
# Corresponds to the JSON property `createTime`
# @return [String]
attr_accessor :create_time
# Detailed information of the current Symptom.
# Corresponds to the JSON property `details`
# @return [String]
attr_accessor :details
# Type of the Symptom.
# Corresponds to the JSON property `symptomType`
# @return [String]
attr_accessor :symptom_type
# A string used to uniquely distinguish a worker within a TPU node.
# Corresponds to the JSON property `workerId`
# @return [String]
attr_accessor :worker_id
def initialize(**args)
update!(**args)
end
# Update properties of this object
def update!(**args)
@create_time = args[:create_time] if args.key?(:create_time)
@details = args[:details] if args.key?(:details)
@symptom_type = args[:symptom_type] if args.key?(:symptom_type)
@worker_id = args[:worker_id] if args.key?(:worker_id)
end
end
# A tensorflow version that a Node can be configured with.
class TensorFlowVersion
include Google::Apis::Core::Hashable
# The resource name.
# Corresponds to the JSON property `name`
# @return [String]
attr_accessor :name
# the tensorflow version.
# Corresponds to the JSON property `version`
# @return [String]
attr_accessor :version
def initialize(**args)
update!(**args)
end
# Update properties of this object
def update!(**args)
@name = args[:name] if args.key?(:name)
@version = args[:version] if args.key?(:version)
end
end
end
end
end
| 38.609722 | 103 | 0.617396 |
7acebb6455e9d4d9efbec621373d289498675ce1 | 662 | class PokemonsController < ApplicationController
layout "main"
def index
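    # Filter by type, name, or number when a search was submitted; otherwise list every Pokemon.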
if params[:searching_for]
      search_type = params[:searching_for]
      if search_type == "types"
        @pokemons = Pokemon.type_search(params[:query])
      elsif search_type == "name"
@pokemons = Pokemon.name_search(params[:query])
else
@pokemons = Pokemon.number_search(params[:query])
end
else
@pokemons = Pokemon.all
end
end
def show
@pokemon = Pokemon.find_by(id: params[:id])
end
def mypokemons
@pokemons = current_user.pokemons
end
end | 25.461538 | 65 | 0.539275 |
21502271ac4c77f04862e6db2d4f149066924b47 | 3,307 | # == Schema Information
#
# Table name: stops
#
# id :integer not null, primary key
# name :string
#
# Table name: routes
#
# num :string not null, primary key
# company :string not null, primary key
# pos :integer not null, primary key
# stop_id :integer
require_relative './sqlzoo.rb'
def num_stops
# How many stops are in the database?
execute(<<-SQL)
SELECT
COUNT(*)
FROM
stops;
SQL
end
def craiglockhart_id
# Find the id value for the stop 'Craiglockhart'.
execute(<<-SQL)
SELECT
stops.id
FROM
stops
WHERE
stops.name = 'Craiglockhart';
SQL
end
def lrt_stops
# Give the id and the name for the stops on the '4' 'LRT' service.
  execute(<<-SQL)
    SELECT
      stops.id,
      stops.name
    FROM
      stops
    JOIN
      routes ON routes.stop_id = stops.id
    WHERE
      routes.num = '4' AND routes.company = 'LRT';
  SQL
end
def connecting_routes
# Consider the following query:
#
# SELECT
# company,
# num,
# COUNT(*)
# FROM
# routes
# WHERE
# stop_id = 149 OR stop_id = 53
# GROUP BY
# company, num
#
# The query gives the number of routes that visit either London Road
# (149) or Craiglockhart (53). Run the query and notice the two services
# that link these stops have a count of 2. Add a HAVING clause to restrict
# the output to these two routes.
  execute(<<-SQL)
    SELECT
      company,
      num,
      COUNT(*)
    FROM
      routes
    WHERE
      stop_id = 149 OR stop_id = 53
    GROUP BY
      company, num
    HAVING
      COUNT(*) = 2;
  SQL
end
def cl_to_lr
# Consider the query:
#
# SELECT
# a.company,
# a.num,
# a.stop_id,
# b.stop_id
# FROM
# routes a
# JOIN
# routes b ON (a.company = b.company AND a.num = b.num)
# WHERE
# a.stop_id = 53
#
# Observe that b.stop_id gives all the places you can get to from
# Craiglockhart, without changing routes. Change the query so that it
# shows the services from Craiglockhart to London Road.
  execute(<<-SQL)
    SELECT
      a.company,
      a.num,
      a.stop_id,
      b.stop_id
    FROM
      routes a
    JOIN
      routes b ON (a.company = b.company AND a.num = b.num)
    WHERE
      a.stop_id = 53 AND b.stop_id = 149;
  SQL
end
def cl_to_lr_by_name
# Consider the query:
#
# SELECT
# a.company,
# a.num,
# stopa.name,
# stopb.name
# FROM
# routes a
# JOIN
# routes b ON (a.company = b.company AND a.num = b.num)
# JOIN
# stops stopa ON (a.stop_id = stopa.id)
# JOIN
# stops stopb ON (b.stop_id = stopb.id)
# WHERE
# stopa.name = 'Craiglockhart'
#
# The query shown is similar to the previous one, however by joining two
# copies of the stops table we can refer to stops by name rather than by
# number. Change the query so that the services between 'Craiglockhart' and
# 'London Road' are shown.
  execute(<<-SQL)
    SELECT
      a.company,
      a.num,
      stopa.name,
      stopb.name
    FROM
      routes a
    JOIN
      routes b ON (a.company = b.company AND a.num = b.num)
    JOIN
      stops stopa ON (a.stop_id = stopa.id)
    JOIN
      stops stopb ON (b.stop_id = stopb.id)
    WHERE
      stopa.name = 'Craiglockhart' AND stopb.name = 'London Road';
  SQL
end
def haymarket_and_leith
# Give the company and num of the services that connect stops
# 115 and 137 ('Haymarket' and 'Leith')
  execute(<<-SQL)
    SELECT DISTINCT
      a.company,
      a.num
    FROM
      routes a
    JOIN
      routes b ON (a.company = b.company AND a.num = b.num)
    WHERE
      a.stop_id = 115 AND b.stop_id = 137;
  SQL
end
def craiglockhart_and_tollcross
# Give the company and num of the services that connect stops
# 'Craiglockhart' and 'Tollcross'
  execute(<<-SQL)
    SELECT DISTINCT
      a.company,
      a.num
    FROM
      routes a
    JOIN
      routes b ON (a.company = b.company AND a.num = b.num)
    JOIN
      stops stopa ON (a.stop_id = stopa.id)
    JOIN
      stops stopb ON (b.stop_id = stopb.id)
    WHERE
      stopa.name = 'Craiglockhart' AND stopb.name = 'Tollcross';
  SQL
end
def start_at_craiglockhart
# Give a distinct list of the stops that can be reached from 'Craiglockhart'
# by taking one bus, including 'Craiglockhart' itself. Include the stop name,
# as well as the company and bus no. of the relevant service.
  execute(<<-SQL)
    SELECT DISTINCT
      stopb.name,
      a.company,
      a.num
    FROM
      routes a
    JOIN
      routes b ON (a.company = b.company AND a.num = b.num)
    JOIN
      stops stopa ON (a.stop_id = stopa.id)
    JOIN
      stops stopb ON (b.stop_id = stopb.id)
    WHERE
      stopa.name = 'Craiglockhart';
  SQL
end
def craiglockhart_to_sighthill
# Find the routes involving two buses that can go from Craiglockhart to
# Sighthill. Show the bus no. and company for the first bus, the name of the
# stop for the transfer, and the bus no. and company for the second bus.
  execute(<<-SQL)
    -- Bus 1 (a/b) leaves Craiglockhart; bus 2 (c/d) arrives at Sighthill.
    -- The two routes connect wherever they share a stop (the transfer stop).
    SELECT DISTINCT
      a.num,
      a.company,
      transfer.name,
      d.num,
      d.company
    FROM
      routes a
    JOIN
      routes b ON (a.company = b.company AND a.num = b.num)
    JOIN
      routes c ON (b.stop_id = c.stop_id)
    JOIN
      routes d ON (c.company = d.company AND c.num = d.num)
    JOIN
      stops transfer ON (b.stop_id = transfer.id)
    WHERE
      a.stop_id = (SELECT id FROM stops WHERE name = 'Craiglockhart')
      AND d.stop_id = (SELECT id FROM stops WHERE name = 'Sighthill');
  SQL
end
| 22.806897 | 79 | 0.655277 |
286daca44b03407c4785b94db71f0d5e15ffab76 | 3,891 | class Rust < Formula
desc "Safe, concurrent, practical language"
homepage "https://www.rust-lang.org/"
# license ["Apache-2.0", "MIT"] - pending https://github.com/Homebrew/brew/pull/7953
license "Apache-2.0"
stable do
url "https://static.rust-lang.org/dist/rustc-1.45.1-src.tar.gz"
sha256 "ea53e6424e3d1fe56c6d77a00e72c5d594b509ec920c5a779a7b8e1dbd74219b"
resource "cargo" do
url "https://github.com/rust-lang/cargo.git",
tag: "0.46.1",
revision: "f242df6edb897f6f69d393a22bb257f5af0f52d0"
end
end
bottle do
cellar :any
sha256 "825ab73358b796efc7be4f47fe1c62009cd43b05b2631acfaac343baedf737b8" => :catalina
sha256 "f641fd3fb4902b66e5334aa8b9c26c1c80e81e378a0c013facc2a83dc546a827" => :mojave
sha256 "20041687555f8de92b5209ee898104ed9ff29a5a8810f74a239a4214c41f39a5" => :high_sierra
end
head do
url "https://github.com/rust-lang/rust.git"
resource "cargo" do
url "https://github.com/rust-lang/cargo.git"
end
end
depends_on "cmake" => :build
depends_on "[email protected]" => :build
depends_on "libssh2"
depends_on "[email protected]"
depends_on "pkg-config"
uses_from_macos "curl"
uses_from_macos "zlib"
resource "cargobootstrap" do
# From https://github.com/rust-lang/rust/blob/#{version}/src/stage0.txt
url "https://static.rust-lang.org/dist/2020-06-04/cargo-0.45.0-x86_64-apple-darwin.tar.gz"
sha256 "3a618459c8a22773a299d683e4ea0355e615372ae573300933caf6d00019bdd3"
end
def install
ENV.prepend_path "PATH", Formula["[email protected]"].opt_libexec/"bin"
# Fix build failure for compiler_builtins "error: invalid deployment target
# for -stdlib=libc++ (requires OS X 10.7 or later)"
ENV["MACOSX_DEPLOYMENT_TARGET"] = MacOS.version
# Ensure that the `openssl` crate picks up the intended library.
# https://crates.io/crates/openssl#manual-configuration
ENV["OPENSSL_DIR"] = Formula["[email protected]"].opt_prefix
# Fix build failure for cmake v0.1.24 "error: internal compiler error:
# src/librustc/ty/subst.rs:127: impossible case reached" on 10.11, and for
# libgit2-sys-0.6.12 "fatal error: 'os/availability.h' file not found
# #include <os/availability.h>" on 10.11 and "SecTrust.h:170:67: error:
# expected ';' after top level declarator" among other errors on 10.12
ENV["SDKROOT"] = MacOS.sdk_path
args = ["--prefix=#{prefix}"]
if build.head?
args << "--disable-rpath"
args << "--release-channel=nightly"
else
args << "--release-channel=stable"
end
system "./configure", *args
system "make"
system "make", "install"
resource("cargobootstrap").stage do
system "./install.sh", "--prefix=#{buildpath}/cargobootstrap"
end
ENV.prepend_path "PATH", buildpath/"cargobootstrap/bin"
resource("cargo").stage do
ENV["RUSTC"] = bin/"rustc"
args = %W[--root #{prefix} --path . --features curl-sys/force-system-lib-on-osx]
system "cargo", "install", *args
man1.install Dir["src/etc/man/*.1"]
bash_completion.install "src/etc/cargo.bashcomp.sh"
zsh_completion.install "src/etc/_cargo"
end
rm_rf prefix/"lib/rustlib/uninstall.sh"
rm_rf prefix/"lib/rustlib/install.log"
end
def post_install
Dir["#{lib}/rustlib/**/*.dylib"].each do |dylib|
chmod 0664, dylib
MachO::Tools.change_dylib_id(dylib, "@rpath/#{File.basename(dylib)}")
chmod 0444, dylib
end
end
test do
system "#{bin}/rustdoc", "-h"
(testpath/"hello.rs").write <<~EOS
fn main() {
println!("Hello World!");
}
EOS
system "#{bin}/rustc", "hello.rs"
assert_equal "Hello World!\n", `./hello`
system "#{bin}/cargo", "new", "hello_world", "--bin"
assert_equal "Hello, world!",
(testpath/"hello_world").cd { `#{bin}/cargo run`.split("\n").last }
end
end
| 33.25641 | 94 | 0.668209 |
1a67543d5207fe52ec22ebcf17bd82a23b706f17 | 302 | cask :v1 => 'node' do
version '0.10.36'
sha256 'f6702b77c7b2f269834acab2210fc5bf43bc20467652ddefb55ccec61c58193a'
url "http://nodejs.org/dist/v#{version}/node-v#{version}.pkg"
homepage 'http://nodejs.org'
license :mit
pkg "node-v#{version}.pkg"
uninstall :pkgutil => 'org.nodejs'
end
| 23.230769 | 75 | 0.708609 |
263dc99b96f00c24e06a364a003b406cbb04250a | 1,825 |
lib = File.expand_path("../lib", __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require "flu_vaccination/version"
Gem::Specification.new do |spec|
spec.name = "flu_vaccination"
  spec.version       = FluVaccination::VERSION
spec.authors = ["TODO: Write your name"]
spec.email = ["TODO: Write your email address"]
spec.summary = %q{TODO: Write a short summary, because RubyGems requires one.}
spec.description = %q{TODO: Write a longer description or delete this line.}
spec.homepage = "TODO: Put your gem's website or public repo URL here."
spec.license = "MIT"
# Prevent pushing this gem to RubyGems.org. To allow pushes either set the 'allowed_push_host'
# to allow pushing to a single host or delete this section to allow pushing to any host.
if spec.respond_to?(:metadata)
spec.metadata["allowed_push_host"] = "TODO: Set to 'http://mygemserver.com'"
spec.metadata["homepage_uri"] = spec.homepage
spec.metadata["source_code_uri"] = "TODO: Put your gem's public repo URL here."
spec.metadata["changelog_uri"] = "TODO: Put your gem's CHANGELOG.md URL here."
else
raise "RubyGems 2.0 or newer is required to protect against " \
"public gem pushes."
end
# Specify which files should be added to the gem when it is released.
# The `git ls-files -z` loads the files in the RubyGem that have been added into git.
spec.files = Dir.chdir(File.expand_path('..', __FILE__)) do
`git ls-files -z`.split("\x0").reject { |f| f.match(%r{^(test|spec|features)/}) }
end
spec.bindir = "exe"
spec.executables = spec.files.grep(%r{^exe/}) { |f| File.basename(f) }
spec.require_paths = ["lib"]
spec.add_development_dependency "bundler", "~> 1.17"
spec.add_development_dependency "rake", "~> 10.0"
end
| 43.452381 | 96 | 0.675616 |
f7bf7397d89687d465e05f37540bdb352349d3f9 | 592 | Pod::Spec.new do |s|
s.name = "HYPWebView"
s.summary = "WebView++"
s.version = "0.1.0"
s.homepage = "https://github.com/hyperoslo/HYPWebView"
s.license = 'MIT'
s.author = { "Hyper Interaktiv AS" => "[email protected]" }
s.source = { :git => "https://github.com/hyperoslo/HYPWebView.git", :tag => s.version.to_s }
s.social_media_url = 'https://twitter.com/hyperoslo'
s.platform = :ios, '7.0'
s.requires_arc = true
s.source_files = 'Source/**/*'
s.frameworks = 'UIKit'
s.dependency 'SVProgressHUD'
end
| 37 | 104 | 0.567568 |
6ac62f831bb6cd5c1364579e01c3165cabde92b9 | 9,231 | # Copyright (C) 2013-2016 VMware, Inc.
provider_path = Pathname.new(__FILE__).parent.parent
require File.join(provider_path, 'vcd')
Puppet::Type.type(:vcd_system).provide(:vcd_system, :parent => Puppet::Provider::Vcd) do
@doc = 'Manage vcd system settings'
include PuppetX::VMware::Util
def general_settings
url = prefix_uri_path('api/admin/extension/settings/general')
@general_settings ||=
begin
results = nested_value(get(url), [ 'vmext:GeneralSettings' ] )
end
end
def system_settings
url = prefix_uri_path('api/admin/extension/settings')
@system_settings ||=
begin
results = nested_value(get(url), [ 'vmext:SystemSettings' ] )
end
end
def camel(prop)
camel_prop = PuppetX::VMware::Util.camelize(prop, :upper)
end
# set the order for elements since vcd expects them this way
# TODO, see if there is a way to get these dynamically
def settings_order
{ :amqp_settings => [
'vmext:AmqpHost',
'vmext:AmqpPort',
'vmext:AmqpUsername',
'vmext:AmqpPassword',
'vmext:AmqpExchange',
'vmext:AmqpVHost',
'vmext:AmqpUseSSL',
'vmext:AmqpSslAcceptAll',
'vmext:AmqpPrefix',
],
:branding_settings => [
'vmext:CompanyName',
'vmext:LoginPageCustomizationTheme',
'vmext:Theme',
'vmext:PreviewCustomTheme',
'vmext:FinalCustomTheme',
'vmext:AboutCompanyUrl',
'vmext:SupportUrl',
'vmext:SignUpUrl',
'vmext:ForgotUserNameOrPasswordURL',
],
:general_settings => [
'vmext:AbsoluteSessionTimeoutMinutes',
'vmext:ActivityLogDisplayDays',
'vmext:ActivityLogKeepDays',
'vmext:AllowOverlappingExtNets',
'vmext:ChargebackEventsKeepDays',
'vmext:ChargebackTablesCleanupJobTimeInSeconds',
'vmext:ConsoleProxyExternalAddress',
'vmext:HostCheckDelayInSeconds',
'vmext:HostCheckTimeoutSeconds',
'vmext:InstallationId',
'vmext:IpReservationTimeoutSeconds',
'vmext:SyslogServerSettings',
'vmext:LoginNameOnly',
'vmext:PrePopDefaultName',
'vmext:QuarantineEnabled',
'vmext:QuarantineResponseTimeoutSeconds',
'vmext:RestApiBaseHttpUri',
'vmext:RestApiBaseUri',
'vmext:RestApiBaseUriPublicCertChain',
'vmext:SessionTimeoutMinutes',
'vmext:ShowStackTraces',
'vmext:SyncStartDate',
'vmext:SyncIntervalInHours',
'vmext:SystemExternalHttpAddress',
'vmext:SystemExternalAddress',
'vmext:SystemExternalAddressPublicCertChain',
'vmext:TransferSessionTimeoutSeconds',
'vmext:VerifyVcCertificates',
'vmext:VcTruststorePassword',
'vmext:VcTruststoreContents',
'vmext:VcTruststoreType',
'vmext:VmrcVersion',
'vmext:VerifyVsmCertificates',
'vmext:ElasticAllocationPool',
],
:email_settings => [
'vmext:SenderEmailAddress',
'vmext:EmailSubjectPrefix',
'vmext:EmailToAllAdmins',
'vmext:AlertEmailToAllAdmins',
'vmext:AlertEmailTo',
'vmext:SmtpSettings',
],
:ldap_settings => [
'vmext:HostName',
'vmext:Port',
'vmext:IsSsl',
'vmext:IsSslAcceptAll',
'vmext:Realm',
'vmext:PagedSearchDisabled',
'vmext:PageSize',
'vmext:MaxResults',
'vmext:MaxUserGroups',
'vmext:SearchBase',
'vmext:UserName',
'vmext:Password',
'vmext:AuthenticationMechanism',
'vmext:GroupSearchBase',
'vmext:IsGroupSearchBaseEnabled',
'vmext:ConnectorType',
'vmext:UserAttributes',
'vmext:GroupAttributes',
'vmext:UseExternalKerberos',
],
:ldap_user_attributes => [
'vmext:ObjectClass',
'vmext:ObjectIdentifier',
'vmext:UserName',
'vmext:Email',
'vmext:FullName',
'vmext:GivenName',
'vmext:Surname',
'vmext:Telephone',
'vmext:GroupMembershipIdentifier',
'vmext:GroupBackLinkIdentifier',
],
:ldap_group_attributes => [
'vmext:ObjectClass',
'vmext:ObjectIdentifier',
'vmext:GroupName',
'vmext:Membership',
'vmext:MembershipIdentifier',
'vmext:BackLinkIdentifier',
],
:password_policy => [
'vmext:AccountLockoutEnabled',
'vmext:AdminAccountLockoutEnabled',
'vmext:InvalidLoginsBeforeLockout',
'vmext:AccountLockoutIntervalMinutes',
],
# not a property, but sub level of email_settings
:smtp_settings => [
'vmext:UseAuthentication',
'vmext:SmtpServerName',
'vmext:SmtpServerPort',
'vmext:ssl',
'vmext:Username',
'vmext:Password',
],
# not a property, but sub level of general_settings
:syslog_order => [
'vcloud:SyslogServerIp1',
'vcloud:SyslogServerIp2',
],
}
end
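  # Dynamically define a getter and setter for every vcd_system property except :ensure; each setter merges the new value into the current settings and PUTs them back to the vCD API in the element order it requires.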
Puppet::Type.type(:vcd_system).properties.collect{|x| x.name}.reject{|x| x == :ensure}.each do |prop|
define_method(prop) do
system_settings["vmext:#{camel(prop)}"]
end
define_method("#{prop}=".to_sym) do |value|
camel_prop = camel(prop)
vmext_name = "vmext:#{camel_prop}"
type = system_settings[vmext_name]['@type']
raise("type was not found for property: #{prop}") if not type
# substitute host url in case backend host differs from customer showing url
prop_link = uri_path(system_settings[vmext_name]['@href'])
raise("href was not found for property: #{prop}") if not prop_link
data = replace_properties(prop,value)
# specify any required sub-level ordering
if settings_order[prop]
case prop.to_s
when 'email_settings'
data[vmext_name]['vmext:SmtpSettings'][:order!] = settings_order[:smtp_settings] & nested_value(data, [vmext_name,'vmext:SmtpSettings'],{}).keys
when 'general_settings'
data[vmext_name]['vmext:SyslogServerSettings'] ||= {}
data[vmext_name]['vmext:SyslogServerSettings'][:order!] = settings_order[:syslog_order] & nested_value(data, [vmext_name, 'vmext:SyslogServerSettings'],{}).keys
when 'ldap_settings'
data[vmext_name]['vmext:UserAttributes'] ||= {}
data[vmext_name]['vmext:GroupAttributes'] ||= {}
data[vmext_name]['vmext:UserAttributes'][:order!] = settings_order[:ldap_user_attributes] & nested_value(data, [vmext_name, 'vmext:UserAttributes'],{}).keys
data[vmext_name]['vmext:GroupAttributes'][:order!] = settings_order[:ldap_group_attributes] & nested_value(data, [vmext_name, 'vmext:GroupAttributes'],{}).keys
else
# do nothing by default
end
data[vmext_name][:order!] = settings_order[prop] & nested_value(data, [vmext_name],{}).keys
end
Puppet.debug("updating #{prop}, using the path: #{prop_link} and the type: #{type}")
put(prop_link, data, type)
end
end
def rem_nil_vals(data)
# TODO, make rem_nil_vals deal with array of hashes
data.each do |k, v|
case v
when NilClass
data.delete(k)
when Array
# do nothing, not gonna handle array of hashes for now, but maybe in future
when Hash
rem_nil_vals(v)
else
# do nothing
end
end
data
end
def replace_properties(prop,value)
vmext_name = "vmext:#{camel(prop)}"
type = system_settings[vmext_name]['@type']
data = { vmext_name => rem_nil_vals(system_settings[vmext_name]).merge(value) }
vmext_attr = vmw_extension['@xmlns:vmext']
vcloud_attr = vmw_extension['@xmlns:vcloud']
data[vmext_name]['@xmlns:vmext'] = vmext_attr
data[vmext_name]['@xmlns:vcloud'] = vcloud_attr
# even though api doc says this is ok, it is not
data[vmext_name].delete('vcloud:Link')
data
end
# lookup service has its own way of doing things
def lookup_service_settings=(lookup_settings)
vmext_name = 'vmext:LookupServiceSettings'
unreg_msg = "lookup service has already been registered, you must unregister before continuing"
fail(unreg_msg) if not system_settings[vmext_name]['vmext:LookupServiceUrl'].nil?
fail("lookup_service_username not found in parameters") if not resource[:lookup_service_username]
fail("lookup_service_password not found in parameters") if not resource[:lookup_service_password]
data = replace_properties('lookup_service_settings', resource[:lookup_service_settings])
vmext_name_params = 'vmext:LookupServiceParams'
data[vmext_name_params] = data[vmext_name]
data.delete(vmext_name)
data[vmext_name_params]['@userName'] = resource[:lookup_service_username]
data[vmext_name_params]['@password'] = resource[:lookup_service_password]
# per internal docs, needs to be set this way versus using what is defined as the @type
type = 'application/*+xml'
prop_link = uri_path(system_settings[vmext_name]['@href'])
Puppet.debug("updating lookup_service_settings, using the path: #{prop_link} and the type: #{type}")
put(prop_link, data, type)
end
end
| 36.2 | 170 | 0.649767 |
917575db6151d0d395a8b4455cfdb6dcae778bf8 | 177 | Rails40::Application.config.secret_key_base = '62c740bbc79cd7fa4dfabc7365c0f7ec98204cfae45a41d2e1423d6084bb762e9310acf46fc7e4cc6215a90c7aeef4107b9f1c7e3b68623fd11af43b716ccf5e'
| 88.5 | 176 | 0.943503 |
e2de8259f86b0ea730fc38c79fe0d82c514b7c46 | 174 | # Example class with Elasticsearch persistence
class PersistentArticle
include Tire::Model::Persistence
property :title
property :published_on
property :tags
end
| 14.5 | 46 | 0.787356 |
bba499898243e712b418338bc08d61189fc329d8 | 301 | class AdminUser < ActiveRecord::Base
# Include default devise modules. Others available are:
# :confirmable, :lockable, :timeoutable and :omniauthable
devise :database_authenticatable, :registerable,
:recoverable, :rememberable, :trackable, :validatable
def to_s
email
end
end
| 27.363636 | 62 | 0.740864 |
33121902a2a4147d63960cd98922db7890bf1171 | 3,801 | # Generated via
# `rails generate curation_concerns:work GenericWork`
module CurationConcerns
class GenericWorksController < ApplicationController
include CurationConcerns::CurationConcernController
# Adds Sufia behaviors to the controller.
include Sufia::WorksControllerBehavior
self.curation_concern_type = GenericWork
self.show_presenter = CurationConcerns::GenericWorkShowPresenter
# our custom local layout intended for public show page, but does
# not seem to mess up admin pages also in this controller.
layout "chf"
# returns JSON for the viewer, an array of hashes, one for each image
# included in this work to be viewed.
# Note we needed to make this action auth right with a custom line in
# in our ability.rb class.
def viewer_images_info
render json: helpers.viewer_images_info(presenter)
end
protected
# override from curation_concerns to add additional response formats to #show
def additional_response_formats(wants)
wants.ris do
# Terrible hack to get download name from our helper
download_name = helpers._download_name_base(presenter) + ".ris"
headers["Content-Disposition"] = ApplicationHelper.encoding_safe_content_disposition(download_name)
render body: CHF::RisSerializer.new(presenter).to_ris
end
wants.csl do
# Terrible hack to get download name from our helper
download_name = helpers._download_name_base(presenter) + ".json"
headers["Content-Disposition"] = ApplicationHelper.encoding_safe_content_disposition(download_name)
render body: CHF::CitableAttributes.new(presenter).to_csl_json
end
# Provide our OAI-PMH representation as "xml", useful for debugging,
# maybe useful for clients.
wants.xml do
render xml: CHF::OaiDcSerialization.new(curation_concern_from_search_results).to_oai_dc(xml_decleration: true)
end
end
# Pretty hacky way to override the t() I18n method when called from template:
# https://github.com/projecthydra/sufia/blob/8bb451451a492e443687f8c5aff4882cac56a131/app/views/curation_concerns/base/_relationships_parent_row.html.erb
# ...so we can catch what would have been "In Generic work" and replace with
# "Part of", while still calling super for everything else, to try and
# avoid breaking anything else.
#
    # The way this is set up upstream, I honestly couldn't figure out a better
    # way to intervene without a higher chance of forward-compatibility problems
    # on upgrades. It could not be overridden in i18n alone to do what we want.
module HelperOverride
def t(key, interpolations = {})
if key == ".label" && interpolations[:type] == "Generic work"
"Part of:"
else
super
end
end
end
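    # With the override above, a template call like
    #   t(".label", type: "Generic work")
    # renders "Part of:" instead of the default "In Generic work" label, while
    # every other key still goes through the normal lookup via super.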
helper HelperOverride
# Adds the 'My Works' breadcrumb; we only want this for logged-in users
# overrides https://github.com/samvera/sufia/blob/v7.3.1/app/controllers/concerns/sufia/works_controller_behavior.rb#L93
def add_breadcrumb_for_controller
super if current_ability.current_user.logged_in?
end
# Show breadcrumbs to all users, even if they're not logged in...
def show_breadcrumbs?
true # this overrides the default case in application_controller.rb .
end
    # ... but, for not-logged-in users, only show the "Back to Search Results" breadcrumb.
def build_breadcrumbs
super
      # This method is defined in application_controller.rb:
filter_breadcrumbs(@breadcrumbs)
end
# overriding presenter to pass in view_context
def presenter(*args)
super.tap do |pres|
pres.view_context = view_context if pres.respond_to?(:view_context=)
end
end
end
end
| 38.01 | 157 | 0.71639 |
117d721e7900e5fe1f63622849f6b2c9c31b4e95 | 134 | class String #:nodoc:
def indent(n)
if n >= 0
gsub(/^/, ' ' * n)
else
gsub(/^ {0,#{-n}}/, "")
end
end
end
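# Illustrative examples (behaviour follows directly from the implementation above):
#   "foo\nbar".indent(2)        # => "  foo\n  bar"
#   "    foo\n  bar".indent(-2) # => "  foo\nbar"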
| 13.4 | 29 | 0.402985 |